ansible-playbook [core 2.17.7]
  config file = None
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/local/lib/python3.12/site-packages/ansible
  ansible collection location = /tmp/collections-BPh
  executable location = /usr/local/bin/ansible-playbook
  python version = 3.12.8 (main, Dec 3 2024, 00:00:00) [GCC 14.2.1 20241104 (Red Hat 14.2.1-6)] (/usr/bin/python3.12)
  jinja version = 3.1.4
  libyaml = True
No config file found; using defaults
running playbook inside collection fedora.linux_system_roles
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.debug to ansible.posix.debug
redirecting (type: callback) ansible.builtin.profile_tasks to ansible.posix.profile_tasks
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_quadlet_demo.yml ***********************************************
2 plays in /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml

PLAY [all] *********************************************************************

TASK [Include vault variables] *************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:5
Saturday 11 January 2025 11:31:13 -0500 (0:00:00.007) 0:00:00.007 ******
ok: [managed-node2] => {
    "ansible_facts": {
        "__podman_test_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n35383939616163653333633431363463313831383037386236646138333162396161356130303461\n3932623930643263313563336163316337643562333936360a363538636631313039343233383732\n38666530383538656639363465313230343533386130303833336434303438333161656262346562\n3362626538613031640a663330613638366132356534363534353239616666653466353961323533\n6565\n"
        },
        "mysql_container_root_password": {
            "__ansible_vault": "$ANSIBLE_VAULT;1.1;AES256\n61333932373230333539663035366431326163363166363036323963623131363530326231303634\n6635326161643165363366323062333334363730376631660a393566366139353861656364656661\n38653463363837336639363032646433666361646535366137303464623261313663643336306465\n6264663730656337310a343962353137386238383064646533366433333437303566656433386233\n34343235326665646661623131643335313236313131353661386338343366316261643634653633\n3832313034366536616531323963333234326461353130303532\n"
        }
    },
    "ansible_included_var_files": [
        "/tmp/podman-9hc/tests/vars/vault-variables.yml"
    ],
    "changed": false
}

PLAY [Deploy the quadlet demo app] *********************************************

TASK [Gathering Facts] *********************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
Saturday 11 January 2025 11:31:13 -0500 (0:00:00.022) 0:00:00.029 ******
[WARNING]: Platform linux on host managed-node2 is using the discovered Python
interpreter at /usr/bin/python3.12, but future installation of another Python
interpreter could change the meaning of that path. See
https://docs.ansible.com/ansible-core/2.17/reference_appendices/interpreter_discovery.html
for more information.
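Note: the "Include vault variables" task above simply loads a vault-encrypted vars file before the test play runs. A minimal sketch of an equivalent task is shown below; the file path and the two variable names come from the log output, while the surrounding play and the encrypted payloads are assumptions (encrypt your own values locally).

- name: Include vault variables
  ansible.builtin.include_vars:
    # Path taken from "ansible_included_var_files" in the log; adjust for your environment.
    file: /tmp/podman-9hc/tests/vars/vault-variables.yml
  # vault-variables.yml is expected to define vault-encrypted values for
  # __podman_test_password and mysql_container_root_password, e.g. created with:
  #   ansible-vault encrypt_string 'secret' --name '__podman_test_password'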
ok: [managed-node2] TASK [Test is only supported on x86_64] **************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:38 Saturday 11 January 2025 11:31:14 -0500 (0:00:01.361) 0:00:01.390 ****** skipping: [managed-node2] => { "false_condition": "ansible_facts[\"architecture\"] != \"x86_64\"" } TASK [End test] **************************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:45 Saturday 11 January 2025 11:31:14 -0500 (0:00:00.020) 0:00:01.410 ****** META: end_play conditional evaluated to False, continuing play skipping: [managed-node2] => { "skip_reason": "end_play conditional evaluated to False, continuing play" } MSG: end_play TASK [Generate certificates] *************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:51 Saturday 11 January 2025 11:31:14 -0500 (0:00:00.012) 0:00:01.423 ****** included: fedora.linux_system_roles.certificate for managed-node2 TASK [fedora.linux_system_roles.certificate : Set version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:2 Saturday 11 January 2025 11:31:14 -0500 (0:00:00.063) 0:00:01.487 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.certificate : Ensure ansible_facts used by role] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:2 Saturday 11 January 2025 11:31:14 -0500 (0:00:00.043) 0:00:01.531 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__certificate_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.certificate : Check if system is ostree] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:10 Saturday 11 January 2025 11:31:14 -0500 (0:00:00.043) 0:00:01.575 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.certificate : Set flag to indicate system is ostree] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:15 Saturday 11 January 2025 11:31:15 -0500 (0:00:00.477) 0:00:02.053 ****** ok: [managed-node2] => { "ansible_facts": { "__certificate_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.certificate : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/set_vars.yml:19 Saturday 11 January 2025 11:31:15 -0500 (0:00:00.037) 0:00:02.090 ****** skipping: [managed-node2] => (item=RedHat.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "RedHat.yml", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { 
"__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__certificate_certmonger_packages": [ "certmonger", "python3-packaging" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5 Saturday 11 January 2025 11:31:15 -0500 (0:00:00.065) 0:00:02.155 ****** changed: [managed-node2] => { "changed": true, "rc": 0, "results": [ "Installed: python3-pyasn1-0.6.1-1.el10.noarch", "Installed: python3-cffi-1.16.0-7.el10.x86_64", "Installed: python3-ply-3.11-25.el10.noarch", "Installed: python3-pycparser-2.20-16.el10.noarch", "Installed: python3-cryptography-43.0.0-4.el10.x86_64" ] } lsrpackages: python3-cryptography python3-dbus python3-pyasn1 TASK [fedora.linux_system_roles.certificate : Ensure provider packages are installed] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23 Saturday 11 January 2025 11:31:17 -0500 (0:00:01.881) 0:00:04.037 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "rc": 0, "results": [ "Installed: nspr-4.35.0-34.el10.x86_64", "Installed: nss-3.101.0-13.el10.x86_64", "Installed: python3-packaging-23.2-6.el10.noarch", "Installed: nss-util-3.101.0-13.el10.x86_64", "Installed: dbus-tools-1:1.14.10-5.el10.x86_64", "Installed: nss-softokn-3.101.0-13.el10.x86_64", "Installed: nss-softokn-freebl-3.101.0-13.el10.x86_64", "Installed: certmonger-0.79.20-3.el10.x86_64", "Installed: nss-sysinit-3.101.0-13.el10.x86_64" ] } lsrpackages: certmonger python3-packaging TASK [fedora.linux_system_roles.certificate : Ensure pre-scripts hooks directory exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:35 Saturday 11 January 2025 11:31:20 -0500 (0:00:02.909) 0:00:06.946 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//pre-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure post-scripts hooks directory exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:61 Saturday 11 January 2025 11:31:20 -0500 (0:00:00.527) 0:00:07.474 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/etc/certmonger//post-scripts", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.certificate : Ensure 
provider service is running] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90 Saturday 11 January 2025 11:31:21 -0500 (0:00:00.457) 0:00:07.931 ****** changed: [managed-node2] => (item=certmonger) => { "__certificate_provider": "certmonger", "ansible_loop_var": "__certificate_provider", "changed": true, "enabled": true, "name": "certmonger", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:certmonger_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice network.target sysinit.target basic.target dbus-broker.service syslog.target dbus.socket systemd-journald.socket", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedorahosted.certmonger", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "Certificate monitoring and PKI enrollment", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/certmonger (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/certmonger ; argv[]=/usr/sbin/certmonger -S -p /run/certmonger.pid -n $OPTS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/certmonger.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "certmonger.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3205324800", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "certmonger.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", 
"PIDFile": "/run/certmonger.pid", "PartOf": "dbus-broker.service", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "sysinit.target dbus.socket system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "disabled", "UtmpMode": "init", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.certificate : Ensure certificate requests] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:101 Saturday 11 January 2025 11:31:22 -0500 (0:00:01.176) 0:00:09.108 ****** changed: [managed-node2] => (item={'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}) => { "ansible_loop_var": "item", "changed": true, "item": { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } } MSG: Certificate 
requested (new). TASK [fedora.linux_system_roles.certificate : Slurp the contents of the files] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152 Saturday 11 January 2025 11:31:23 -0500 (0:00:00.993) 0:00:10.102 ****** ok: [managed-node2] => (item=['cert', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": "LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRTnJpNzBmQWFTS0c1RzdiOTRtVFQ0REFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTXpaaQpPR0ppWkRFdFpqQXhZVFE0WVRFdFlqa3hZbUkyWm1RdFpUSTJOR1F6WkdZd0hoY05NalV3TVRFeE1UWXpNVEl6CldoY05Nall3TVRFeE1UWXpNVEl5V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRFBua0M4MEhGSFEzNW9NckMwNTdHL2NtZGlORGNod0JaRQpGelI3cFNXcnYvMUFLb0hvcVg2STMwM3RLaUVnYTgvbjJOckNFTlJMall3SDNwak1sOVNiZzAvTUJoZ05vUi9zCkVoclBvOUtna0tvc1dDdkdHK2wyU2NwOE9xKzYwTTlLZFVQNDNndTVmb1Z4OXBaNEFOUll4TmdyQlorSkxaRk0KQTNZVGlSYlppWDVyMWxIVlUwY0ROT3djd0xHTzgyUmJjcTZHUUxQdVFZMTIybEg4RmU4U3VKdWVyU0Q4aWhVRAppQ3RnL1pRM0FVdUw5NnJxWmlGWFVDcU11S2VRTnIrSzFaMzFSRm5zOUZSejFFdnYvTjVpYVBSSENFQVFWTU03ClljWGFKU1NGMDYwTitGV0QraGhVMFZGekxsMis2QnNMT3NFMytEZUJXYlZLL2tBenV4OTNBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVJL3NhCjdzREhSMXREc2RvNWlqREc0YkRuemVjd0h3WURWUjBqQkJnd0ZvQVVvcTdkT21WM1prNXdBZWhUUTJzOXdFRmgKRDU0d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFFWGd5K2MvMEkrUTVIL2twSzcwd1VUTUpZTmRIMWViN0ZGWQpNSVYrWGdWNzAzaktuT3dMemVybFllWUNyc3Q0a2ZsWHN0WG00dDlyOUtQdm5HM1JObWRUTUZqeXcvbVZud2t5CkJuRURmKzY4Z1NaSWExbVZCL29yanUwa3JZZnU4elVTdVZwUTU5QkpPVm0vZFdKSUVMRmRPd29vSmNaTWpRQk8KeGFaN1ZZamhpR0JwRHlHdjNBUUpIc3cyalk0WTlJUVdTdjYzTkR4bUYwemN3Q1c4M2JHU3laQkk4dlp4TkFRbQphWWNBWE5YOVVMK0JjRnp0aVZrbTVLS0s1c05nVzVlU0p0eWl6clpHcHhhOHZDQ01LZm9IeVA1QTdrVlhzQ2lFCkJWaFY1eC9yQ0JRc0FoNVFLMkF5cTZOTC8vM3pjRDN4eFpYMHpLWGs0WmRnVG14TDF5VT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "cert", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } ok: [managed-node2] => (item=['key', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBQUklWQVRFIEtFWS0tLS0tCk1JSUV2QUlCQURBTkJna3Foa2lHOXcwQkFRRUZBQVNDQktZd2dnU2lBZ0VBQW9JQkFRRFBua0M4MEhGSFEzNW8KTXJDMDU3Ry9jbWRpTkRjaHdCWkVGelI3cFNXcnYvMUFLb0hvcVg2STMwM3RLaUVnYTgvbjJOckNFTlJMall3SAozcGpNbDlTYmcwL01CaGdOb1Ivc0VoclBvOUtna0tvc1dDdkdHK2wyU2NwOE9xKzYwTTlLZFVQNDNndTVmb1Z4CjlwWjRBTlJZeE5nckJaK0pMWkZNQTNZVGlSYlppWDVyMWxIVlUwY0ROT3djd0xHTzgyUmJjcTZHUUxQdVFZMTIKMmxIOEZlOFN1SnVlclNEOGloVURpQ3RnL1pRM0FVdUw5NnJxWmlGWFVDcU11S2VRTnIrSzFaMzFSRm5zOUZSegoxRXZ2L041aWFQUkhDRUFRVk1NN1ljWGFKU1NGMDYwTitGV0QraGhVMFZGekxsMis2QnNMT3NFMytEZUJXYlZLCi9rQXp1eDkzQWdNQkFBRUNnZjhOUS8wRGdTbHlteFJCdUxKSWs2bmFFYXdicklPUUpINytrYUdSKzhSK3ROZjUKblFiVWUyMnNtazdnV0hWWHIwS3J4SXJJQUNhMDlrUU9oYzhZK2xGa2VjaU5MbWpJdjB2dmJoUWo1eHpwa3pBbgp5Y09VaVgwcWJSN2VZM1BXKys1Q3dWL3g0ajRtcmtwR05MSWZab0IwQnZzMm4rNS9WVmU4Y1ZlYURJUEVxNHR4ClU1UGR2aVN2OHhUOGMveG5kVE5uVyszWCtCa3NzNHlGNTlOWWZ0NnAySTBlUnFPYk1ucFNFMHVHRGJSN1lCdG4KZE0ySjR3OGtEemk3VllSUlRDSE1vcnZ3a2JHUWZJS200dlc5MXNhY2Z6WTVtNVpPWjYrZ1d5c085TEJiOHRiNwovSjY5clorZU9EaHkyY0FWQ25PNHBRL2psSEVmUWthQ0llVXYrRDBDZ1lFQTZqN0ViakRtcnpVWlh2b2FyRjdECnVIQXdibXdFZU8zQzRuVWF1SlJ5dFIwS1ZDRTlvdEZVR09RcmJjaWVhanBPME41eHB2OE5pTXdldktBcEhPUWEKNHhWTkhPazZPN2E3MklyWDI0c3E2TEpEN2U4WWQ4RVZXMm5PRkt2Uzhrd0xpejJOUzRyTmpNT2VpUS9TelNobgoyKy9pU2VQaXJBMlRGajFBS1FzeElBc0NnWUVBNHVac0h6ck1sWXZRNkxFeEU3aFZKKy9wTXQ1WDkrbnE5YjlpCjhrSXhMb2I4aHFkRVZxYUJ6Z0tWRCtObjRlTFlOcFVPZzNEQ1luTDZrWnhSaFBJWEQzeUZ2bmd6b1NjYlhjeHEKZXErRGJ6TWtjR3lsbGpzL2hxRFkzUCtwL2xnTWtHc3F6V1ZwNm15RzZxVlJ2bHNxd0FrUDF2Q2EvSTJ5L0NFUwpVdjN2eGNVQ2dZRUFnK083YmQrU3cydStVVCs0N1B5ZWJxYWNYay9yWkx0YVUyOWtFREZsTkp5WlNDMC9Ld0Y1CnZGb2NLY0Fma2VncW9HZmV6Z3dsYnJTZlNjMVQyTFNHZnYxcllhL2YzZWRSZE9jWUNzRWM4OUFMZDFoOENsRm8KMlFPRnlSWEZ2QzhkQU5VelRTbGwrcS9aLzRvdFF0NVJQQUNoQk1HT3l4a0pqQ2dJakJPRklXa0NnWUVBcGVadwplODBVTk92THpiZHcrTWc0aTBEaE15NWJCNHRqSGxmWFV1YU82MFFMYzdEdFNLWHFGdVRRZEpPREt5Z2ZpcGtsCnlYa3l4NlRtREgxTkpYV1U0dC9iTWs3VkFTbUUydXBSbUc2b0hUQTVubkh5Q0l3OWdJUXV6RGVxNWFzZlZLRG0KNU5NWHhDMmg0QU1NWjdCazY1QTBvcVc5ekpjV25UN1pGUkZTdFRrQ2dZQUt2N3puRXQrS0FvTVgzbGUxZGxyMQorMFV4UjB1bEQxb1lGRUJiVzhTb2pnZjRyRGg1VU83OGNhbG9INVg3aXY1bmUwZXlMa2FxTTlQZkNVK09QdUdRCldxU1hQazNZWEFnbFpvcC9oRE42ejhENnhQS0ZOT1M4RldNTkMxS2xqWXA3bERZaU9vdzltQTg1RzhwblVyUDYKVk9yOStxcU1zL08zMGtWQ2diL1lGQT09Ci0tLS0tRU5EIFBSSVZBVEUgS0VZLS0tLS0K", "encoding": "base64", "item": [ "key", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/private/quadlet_demo.key" } ok: [managed-node2] => (item=['ca', {'name': 'quadlet_demo', 'dns': ['localhost'], 'ca': 'self-sign'}]) => { "ansible_loop_var": "item", "changed": false, "content": 
"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURnakNDQW1xZ0F3SUJBZ0lRTnJpNzBmQWFTS0c1RzdiOTRtVFQ0REFOQmdrcWhraUc5dzBCQVFzRkFEQlEKTVNBd0hnWURWUVFEREJkTWIyTmhiQ0JUYVdkdWFXNW5JRUYxZEdodmNtbDBlVEVzTUNvR0ExVUVBd3dqTXpaaQpPR0ppWkRFdFpqQXhZVFE0WVRFdFlqa3hZbUkyWm1RdFpUSTJOR1F6WkdZd0hoY05NalV3TVRFeE1UWXpNVEl6CldoY05Nall3TVRFeE1UWXpNVEl5V2pBVU1SSXdFQVlEVlFRREV3bHNiMk5oYkdodmMzUXdnZ0VpTUEwR0NTcUcKU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRFBua0M4MEhGSFEzNW9NckMwNTdHL2NtZGlORGNod0JaRQpGelI3cFNXcnYvMUFLb0hvcVg2STMwM3RLaUVnYTgvbjJOckNFTlJMall3SDNwak1sOVNiZzAvTUJoZ05vUi9zCkVoclBvOUtna0tvc1dDdkdHK2wyU2NwOE9xKzYwTTlLZFVQNDNndTVmb1Z4OXBaNEFOUll4TmdyQlorSkxaRk0KQTNZVGlSYlppWDVyMWxIVlUwY0ROT3djd0xHTzgyUmJjcTZHUUxQdVFZMTIybEg4RmU4U3VKdWVyU0Q4aWhVRAppQ3RnL1pRM0FVdUw5NnJxWmlGWFVDcU11S2VRTnIrSzFaMzFSRm5zOUZSejFFdnYvTjVpYVBSSENFQVFWTU03ClljWGFKU1NGMDYwTitGV0QraGhVMFZGekxsMis2QnNMT3NFMytEZUJXYlZLL2tBenV4OTNBZ01CQUFHamdaTXcKZ1pBd0N3WURWUjBQQkFRREFnV2dNQlFHQTFVZEVRUU5NQXVDQ1d4dlkyRnNhRzl6ZERBZEJnTlZIU1VFRmpBVQpCZ2dyQmdFRkJRY0RBUVlJS3dZQkJRVUhBd0l3REFZRFZSMFRBUUgvQkFJd0FEQWRCZ05WSFE0RUZnUVVJL3NhCjdzREhSMXREc2RvNWlqREc0YkRuemVjd0h3WURWUjBqQkJnd0ZvQVVvcTdkT21WM1prNXdBZWhUUTJzOXdFRmgKRDU0d0RRWUpLb1pJaHZjTkFRRUxCUUFEZ2dFQkFFWGd5K2MvMEkrUTVIL2twSzcwd1VUTUpZTmRIMWViN0ZGWQpNSVYrWGdWNzAzaktuT3dMemVybFllWUNyc3Q0a2ZsWHN0WG00dDlyOUtQdm5HM1JObWRUTUZqeXcvbVZud2t5CkJuRURmKzY4Z1NaSWExbVZCL29yanUwa3JZZnU4elVTdVZwUTU5QkpPVm0vZFdKSUVMRmRPd29vSmNaTWpRQk8KeGFaN1ZZamhpR0JwRHlHdjNBUUpIc3cyalk0WTlJUVdTdjYzTkR4bUYwemN3Q1c4M2JHU3laQkk4dlp4TkFRbQphWWNBWE5YOVVMK0JjRnp0aVZrbTVLS0s1c05nVzVlU0p0eWl6clpHcHhhOHZDQ01LZm9IeVA1QTdrVlhzQ2lFCkJWaFY1eC9yQ0JRc0FoNVFLMkF5cTZOTC8vM3pjRDN4eFpYMHpLWGs0WmRnVG14TDF5VT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo=", "encoding": "base64", "item": [ "ca", { "ca": "self-sign", "dns": [ "localhost" ], "name": "quadlet_demo" } ], "source": "/etc/pki/tls/certs/quadlet_demo.crt" } TASK [fedora.linux_system_roles.certificate : Create return data] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:160 Saturday 11 January 2025 11:31:24 -0500 (0:00:01.196) 0:00:11.299 ****** ok: [managed-node2] => { "ansible_facts": { "certificate_test_certs": { "quadlet_demo": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQNri70fAaSKG5G7b94mTT4DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMzZi\nOGJiZDEtZjAxYTQ4YTEtYjkxYmI2ZmQtZTI2NGQzZGYwHhcNMjUwMTExMTYzMTIz\nWhcNMjYwMTExMTYzMTIyWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPnkC80HFHQ35oMrC057G/cmdiNDchwBZE\nFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH3pjMl9Sbg0/MBhgNoR/s\nEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx9pZ4ANRYxNgrBZ+JLZFM\nA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY122lH8Fe8SuJuerSD8ihUD\niCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz1Evv/N5iaPRHCEAQVMM7\nYcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK/kAzux93AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUI/sa\n7sDHR1tDsdo5ijDG4bDnzecwHwYDVR0jBBgwFoAUoq7dOmV3Zk5wAehTQ2s9wEFh\nD54wDQYJKoZIhvcNAQELBQADggEBAEXgy+c/0I+Q5H/kpK70wUTMJYNdH1eb7FFY\nMIV+XgV703jKnOwLzerlYeYCrst4kflXstXm4t9r9KPvnG3RNmdTMFjyw/mVnwky\nBnEDf+68gSZIa1mVB/orju0krYfu8zUSuVpQ59BJOVm/dWJIELFdOwooJcZMjQBO\nxaZ7VYjhiGBpDyGv3AQJHsw2jY4Y9IQWSv63NDxmF0zcwCW83bGSyZBI8vZxNAQm\naYcAXNX9UL+BcFztiVkm5KKK5sNgW5eSJtyizrZGpxa8vCCMKfoHyP5A7kVXsCiE\nBVhV5x/rCBQsAh5QK2Ayq6NL//3zcD3xxZX0zKXk4ZdgTmxL1yU=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQNri70fAaSKG5G7b94mTT4DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMzZi\nOGJiZDEtZjAxYTQ4YTEtYjkxYmI2ZmQtZTI2NGQzZGYwHhcNMjUwMTExMTYzMTIz\nWhcNMjYwMTExMTYzMTIyWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPnkC80HFHQ35oMrC057G/cmdiNDchwBZE\nFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH3pjMl9Sbg0/MBhgNoR/s\nEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx9pZ4ANRYxNgrBZ+JLZFM\nA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY122lH8Fe8SuJuerSD8ihUD\niCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz1Evv/N5iaPRHCEAQVMM7\nYcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK/kAzux93AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUI/sa\n7sDHR1tDsdo5ijDG4bDnzecwHwYDVR0jBBgwFoAUoq7dOmV3Zk5wAehTQ2s9wEFh\nD54wDQYJKoZIhvcNAQELBQADggEBAEXgy+c/0I+Q5H/kpK70wUTMJYNdH1eb7FFY\nMIV+XgV703jKnOwLzerlYeYCrst4kflXstXm4t9r9KPvnG3RNmdTMFjyw/mVnwky\nBnEDf+68gSZIa1mVB/orju0krYfu8zUSuVpQ59BJOVm/dWJIELFdOwooJcZMjQBO\nxaZ7VYjhiGBpDyGv3AQJHsw2jY4Y9IQWSv63NDxmF0zcwCW83bGSyZBI8vZxNAQm\naYcAXNX9UL+BcFztiVkm5KKK5sNgW5eSJtyizrZGpxa8vCCMKfoHyP5A7kVXsCiE\nBVhV5x/rCBQsAh5QK2Ayq6NL//3zcD3xxZX0zKXk4ZdgTmxL1yU=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDPnkC80HFHQ35o\nMrC057G/cmdiNDchwBZEFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH\n3pjMl9Sbg0/MBhgNoR/sEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx\n9pZ4ANRYxNgrBZ+JLZFMA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY12\n2lH8Fe8SuJuerSD8ihUDiCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz\n1Evv/N5iaPRHCEAQVMM7YcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK\n/kAzux93AgMBAAECgf8NQ/0DgSlymxRBuLJIk6naEawbrIOQJH7+kaGR+8R+tNf5\nnQbUe22smk7gWHVXr0KrxIrIACa09kQOhc8Y+lFkeciNLmjIv0vvbhQj5xzpkzAn\nycOUiX0qbR7eY3PW++5CwV/x4j4mrkpGNLIfZoB0Bvs2n+5/VVe8cVeaDIPEq4tx\nU5PdviSv8xT8c/xndTNnW+3X+Bkss4yF59NYft6p2I0eRqObMnpSE0uGDbR7YBtn\ndM2J4w8kDzi7VYRRTCHMorvwkbGQfIKm4vW91sacfzY5m5ZOZ6+gWysO9LBb8tb7\n/J69rZ+eODhy2cAVCnO4pQ/jlHEfQkaCIeUv+D0CgYEA6j7EbjDmrzUZXvoarF7D\nuHAwbmwEeO3C4nUauJRytR0KVCE9otFUGOQrbcieajpO0N5xpv8NiMwevKApHOQa\n4xVNHOk6O7a72IrX24sq6LJD7e8Yd8EVW2nOFKvS8kwLiz2NS4rNjMOeiQ/SzShn\n2+/iSePirA2TFj1AKQsxIAsCgYEA4uZsHzrMlYvQ6LExE7hVJ+/pMt5X9+nq9b9i\n8kIxLob8hqdEVqaBzgKVD+Nn4eLYNpUOg3DCYnL6kZxRhPIXD3yFvngzoScbXcxq\neq+DbzMkcGylljs/hqDY3P+p/lgMkGsqzWVp6myG6qVRvlsqwAkP1vCa/I2y/CES\nUv3vxcUCgYEAg+O7bd+Sw2u+UT+47PyebqacXk/rZLtaU29kEDFlNJyZSC0/KwF5\nvFocKcAfkegqoGfezgwlbrSfSc1T2LSGfv1rYa/f3edRdOcYCsEc89ALd1h8ClFo\n2QOFyRXFvC8dANUzTSll+q/Z/4otQt5RPAChBMGOyxkJjCgIjBOFIWkCgYEApeZw\ne80UNOvLzbdw+Mg4i0DhMy5bB4tjHlfXUuaO60QLc7DtSKXqFuTQdJODKygfipkl\nyXkyx6TmDH1NJXWU4t/bMk7VASmE2upRmG6oHTA5nnHyCIw9gIQuzDeq5asfVKDm\n5NMXxC2h4AMMZ7Bk65A0oqW9zJcWnT7ZFRFStTkCgYAKv7znEt+KAoMX3le1dlr1\n+0UxR0ulD1oYFEBbW8Sojgf4rDh5UO78caloH5X7iv5ne0eyLkaqM9PfCU+OPuGQ\nWqSXPk3YXAglZop/hDN6z8D6xPKFNOS8FWMNC1KljYp7lDYiOow9mA85G8pnUrP6\nVOr9+qqMs/O30kVCgb/YFA==\n-----END PRIVATE KEY-----\n" } } }, "changed": false } TASK [fedora.linux_system_roles.certificate : Stop tracking certificates] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:176 Saturday 11 January 2025 11:31:24 -0500 (0:00:00.045) 0:00:11.344 ****** ok: [managed-node2] => (item={'cert': '/etc/pki/tls/certs/quadlet_demo.crt', 'cert_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQNri70fAaSKG5G7b94mTT4DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMzZi\nOGJiZDEtZjAxYTQ4YTEtYjkxYmI2ZmQtZTI2NGQzZGYwHhcNMjUwMTExMTYzMTIz\nWhcNMjYwMTExMTYzMTIyWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPnkC80HFHQ35oMrC057G/cmdiNDchwBZE\nFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH3pjMl9Sbg0/MBhgNoR/s\nEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx9pZ4ANRYxNgrBZ+JLZFM\nA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY122lH8Fe8SuJuerSD8ihUD\niCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz1Evv/N5iaPRHCEAQVMM7\nYcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK/kAzux93AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUI/sa\n7sDHR1tDsdo5ijDG4bDnzecwHwYDVR0jBBgwFoAUoq7dOmV3Zk5wAehTQ2s9wEFh\nD54wDQYJKoZIhvcNAQELBQADggEBAEXgy+c/0I+Q5H/kpK70wUTMJYNdH1eb7FFY\nMIV+XgV703jKnOwLzerlYeYCrst4kflXstXm4t9r9KPvnG3RNmdTMFjyw/mVnwky\nBnEDf+68gSZIa1mVB/orju0krYfu8zUSuVpQ59BJOVm/dWJIELFdOwooJcZMjQBO\nxaZ7VYjhiGBpDyGv3AQJHsw2jY4Y9IQWSv63NDxmF0zcwCW83bGSyZBI8vZxNAQm\naYcAXNX9UL+BcFztiVkm5KKK5sNgW5eSJtyizrZGpxa8vCCMKfoHyP5A7kVXsCiE\nBVhV5x/rCBQsAh5QK2Ayq6NL//3zcD3xxZX0zKXk4ZdgTmxL1yU=\n-----END CERTIFICATE-----\n', 'key': '/etc/pki/tls/private/quadlet_demo.key', 'key_content': '-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDPnkC80HFHQ35o\nMrC057G/cmdiNDchwBZEFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH\n3pjMl9Sbg0/MBhgNoR/sEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx\n9pZ4ANRYxNgrBZ+JLZFMA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY12\n2lH8Fe8SuJuerSD8ihUDiCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz\n1Evv/N5iaPRHCEAQVMM7YcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK\n/kAzux93AgMBAAECgf8NQ/0DgSlymxRBuLJIk6naEawbrIOQJH7+kaGR+8R+tNf5\nnQbUe22smk7gWHVXr0KrxIrIACa09kQOhc8Y+lFkeciNLmjIv0vvbhQj5xzpkzAn\nycOUiX0qbR7eY3PW++5CwV/x4j4mrkpGNLIfZoB0Bvs2n+5/VVe8cVeaDIPEq4tx\nU5PdviSv8xT8c/xndTNnW+3X+Bkss4yF59NYft6p2I0eRqObMnpSE0uGDbR7YBtn\ndM2J4w8kDzi7VYRRTCHMorvwkbGQfIKm4vW91sacfzY5m5ZOZ6+gWysO9LBb8tb7\n/J69rZ+eODhy2cAVCnO4pQ/jlHEfQkaCIeUv+D0CgYEA6j7EbjDmrzUZXvoarF7D\nuHAwbmwEeO3C4nUauJRytR0KVCE9otFUGOQrbcieajpO0N5xpv8NiMwevKApHOQa\n4xVNHOk6O7a72IrX24sq6LJD7e8Yd8EVW2nOFKvS8kwLiz2NS4rNjMOeiQ/SzShn\n2+/iSePirA2TFj1AKQsxIAsCgYEA4uZsHzrMlYvQ6LExE7hVJ+/pMt5X9+nq9b9i\n8kIxLob8hqdEVqaBzgKVD+Nn4eLYNpUOg3DCYnL6kZxRhPIXD3yFvngzoScbXcxq\neq+DbzMkcGylljs/hqDY3P+p/lgMkGsqzWVp6myG6qVRvlsqwAkP1vCa/I2y/CES\nUv3vxcUCgYEAg+O7bd+Sw2u+UT+47PyebqacXk/rZLtaU29kEDFlNJyZSC0/KwF5\nvFocKcAfkegqoGfezgwlbrSfSc1T2LSGfv1rYa/f3edRdOcYCsEc89ALd1h8ClFo\n2QOFyRXFvC8dANUzTSll+q/Z/4otQt5RPAChBMGOyxkJjCgIjBOFIWkCgYEApeZw\ne80UNOvLzbdw+Mg4i0DhMy5bB4tjHlfXUuaO60QLc7DtSKXqFuTQdJODKygfipkl\nyXkyx6TmDH1NJXWU4t/bMk7VASmE2upRmG6oHTA5nnHyCIw9gIQuzDeq5asfVKDm\n5NMXxC2h4AMMZ7Bk65A0oqW9zJcWnT7ZFRFStTkCgYAKv7znEt+KAoMX3le1dlr1\n+0UxR0ulD1oYFEBbW8Sojgf4rDh5UO78caloH5X7iv5ne0eyLkaqM9PfCU+OPuGQ\nWqSXPk3YXAglZop/hDN6z8D6xPKFNOS8FWMNC1KljYp7lDYiOow9mA85G8pnUrP6\nVOr9+qqMs/O30kVCgb/YFA==\n-----END PRIVATE KEY-----\n', 'ca': '/etc/pki/tls/certs/quadlet_demo.crt', 'ca_content': '-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQNri70fAaSKG5G7b94mTT4DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMzZi\nOGJiZDEtZjAxYTQ4YTEtYjkxYmI2ZmQtZTI2NGQzZGYwHhcNMjUwMTExMTYzMTIz\nWhcNMjYwMTExMTYzMTIyWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPnkC80HFHQ35oMrC057G/cmdiNDchwBZE\nFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH3pjMl9Sbg0/MBhgNoR/s\nEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx9pZ4ANRYxNgrBZ+JLZFM\nA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY122lH8Fe8SuJuerSD8ihUD\niCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz1Evv/N5iaPRHCEAQVMM7\nYcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK/kAzux93AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUI/sa\n7sDHR1tDsdo5ijDG4bDnzecwHwYDVR0jBBgwFoAUoq7dOmV3Zk5wAehTQ2s9wEFh\nD54wDQYJKoZIhvcNAQELBQADggEBAEXgy+c/0I+Q5H/kpK70wUTMJYNdH1eb7FFY\nMIV+XgV703jKnOwLzerlYeYCrst4kflXstXm4t9r9KPvnG3RNmdTMFjyw/mVnwky\nBnEDf+68gSZIa1mVB/orju0krYfu8zUSuVpQ59BJOVm/dWJIELFdOwooJcZMjQBO\nxaZ7VYjhiGBpDyGv3AQJHsw2jY4Y9IQWSv63NDxmF0zcwCW83bGSyZBI8vZxNAQm\naYcAXNX9UL+BcFztiVkm5KKK5sNgW5eSJtyizrZGpxa8vCCMKfoHyP5A7kVXsCiE\nBVhV5x/rCBQsAh5QK2Ayq6NL//3zcD3xxZX0zKXk4ZdgTmxL1yU=\n-----END CERTIFICATE-----\n'}) => { "ansible_loop_var": "item", "changed": false, "cmd": [ "getcert", "stop-tracking", "-f", "/etc/pki/tls/certs/quadlet_demo.crt" ], "delta": "0:00:00.026637", "end": "2025-01-11 11:31:25.035264", "item": { "ca": "/etc/pki/tls/certs/quadlet_demo.crt", "ca_content": "-----BEGIN 
CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQNri70fAaSKG5G7b94mTT4DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMzZi\nOGJiZDEtZjAxYTQ4YTEtYjkxYmI2ZmQtZTI2NGQzZGYwHhcNMjUwMTExMTYzMTIz\nWhcNMjYwMTExMTYzMTIyWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPnkC80HFHQ35oMrC057G/cmdiNDchwBZE\nFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH3pjMl9Sbg0/MBhgNoR/s\nEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx9pZ4ANRYxNgrBZ+JLZFM\nA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY122lH8Fe8SuJuerSD8ihUD\niCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz1Evv/N5iaPRHCEAQVMM7\nYcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK/kAzux93AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUI/sa\n7sDHR1tDsdo5ijDG4bDnzecwHwYDVR0jBBgwFoAUoq7dOmV3Zk5wAehTQ2s9wEFh\nD54wDQYJKoZIhvcNAQELBQADggEBAEXgy+c/0I+Q5H/kpK70wUTMJYNdH1eb7FFY\nMIV+XgV703jKnOwLzerlYeYCrst4kflXstXm4t9r9KPvnG3RNmdTMFjyw/mVnwky\nBnEDf+68gSZIa1mVB/orju0krYfu8zUSuVpQ59BJOVm/dWJIELFdOwooJcZMjQBO\nxaZ7VYjhiGBpDyGv3AQJHsw2jY4Y9IQWSv63NDxmF0zcwCW83bGSyZBI8vZxNAQm\naYcAXNX9UL+BcFztiVkm5KKK5sNgW5eSJtyizrZGpxa8vCCMKfoHyP5A7kVXsCiE\nBVhV5x/rCBQsAh5QK2Ayq6NL//3zcD3xxZX0zKXk4ZdgTmxL1yU=\n-----END CERTIFICATE-----\n", "cert": "/etc/pki/tls/certs/quadlet_demo.crt", "cert_content": "-----BEGIN CERTIFICATE-----\nMIIDgjCCAmqgAwIBAgIQNri70fAaSKG5G7b94mTT4DANBgkqhkiG9w0BAQsFADBQ\nMSAwHgYDVQQDDBdMb2NhbCBTaWduaW5nIEF1dGhvcml0eTEsMCoGA1UEAwwjMzZi\nOGJiZDEtZjAxYTQ4YTEtYjkxYmI2ZmQtZTI2NGQzZGYwHhcNMjUwMTExMTYzMTIz\nWhcNMjYwMTExMTYzMTIyWjAUMRIwEAYDVQQDEwlsb2NhbGhvc3QwggEiMA0GCSqG\nSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDPnkC80HFHQ35oMrC057G/cmdiNDchwBZE\nFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH3pjMl9Sbg0/MBhgNoR/s\nEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx9pZ4ANRYxNgrBZ+JLZFM\nA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY122lH8Fe8SuJuerSD8ihUD\niCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz1Evv/N5iaPRHCEAQVMM7\nYcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK/kAzux93AgMBAAGjgZMw\ngZAwCwYDVR0PBAQDAgWgMBQGA1UdEQQNMAuCCWxvY2FsaG9zdDAdBgNVHSUEFjAU\nBggrBgEFBQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQUI/sa\n7sDHR1tDsdo5ijDG4bDnzecwHwYDVR0jBBgwFoAUoq7dOmV3Zk5wAehTQ2s9wEFh\nD54wDQYJKoZIhvcNAQELBQADggEBAEXgy+c/0I+Q5H/kpK70wUTMJYNdH1eb7FFY\nMIV+XgV703jKnOwLzerlYeYCrst4kflXstXm4t9r9KPvnG3RNmdTMFjyw/mVnwky\nBnEDf+68gSZIa1mVB/orju0krYfu8zUSuVpQ59BJOVm/dWJIELFdOwooJcZMjQBO\nxaZ7VYjhiGBpDyGv3AQJHsw2jY4Y9IQWSv63NDxmF0zcwCW83bGSyZBI8vZxNAQm\naYcAXNX9UL+BcFztiVkm5KKK5sNgW5eSJtyizrZGpxa8vCCMKfoHyP5A7kVXsCiE\nBVhV5x/rCBQsAh5QK2Ayq6NL//3zcD3xxZX0zKXk4ZdgTmxL1yU=\n-----END CERTIFICATE-----\n", "key": "/etc/pki/tls/private/quadlet_demo.key", "key_content": "-----BEGIN PRIVATE 
KEY-----\nMIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDPnkC80HFHQ35o\nMrC057G/cmdiNDchwBZEFzR7pSWrv/1AKoHoqX6I303tKiEga8/n2NrCENRLjYwH\n3pjMl9Sbg0/MBhgNoR/sEhrPo9KgkKosWCvGG+l2Scp8Oq+60M9KdUP43gu5foVx\n9pZ4ANRYxNgrBZ+JLZFMA3YTiRbZiX5r1lHVU0cDNOwcwLGO82Rbcq6GQLPuQY12\n2lH8Fe8SuJuerSD8ihUDiCtg/ZQ3AUuL96rqZiFXUCqMuKeQNr+K1Z31RFns9FRz\n1Evv/N5iaPRHCEAQVMM7YcXaJSSF060N+FWD+hhU0VFzLl2+6BsLOsE3+DeBWbVK\n/kAzux93AgMBAAECgf8NQ/0DgSlymxRBuLJIk6naEawbrIOQJH7+kaGR+8R+tNf5\nnQbUe22smk7gWHVXr0KrxIrIACa09kQOhc8Y+lFkeciNLmjIv0vvbhQj5xzpkzAn\nycOUiX0qbR7eY3PW++5CwV/x4j4mrkpGNLIfZoB0Bvs2n+5/VVe8cVeaDIPEq4tx\nU5PdviSv8xT8c/xndTNnW+3X+Bkss4yF59NYft6p2I0eRqObMnpSE0uGDbR7YBtn\ndM2J4w8kDzi7VYRRTCHMorvwkbGQfIKm4vW91sacfzY5m5ZOZ6+gWysO9LBb8tb7\n/J69rZ+eODhy2cAVCnO4pQ/jlHEfQkaCIeUv+D0CgYEA6j7EbjDmrzUZXvoarF7D\nuHAwbmwEeO3C4nUauJRytR0KVCE9otFUGOQrbcieajpO0N5xpv8NiMwevKApHOQa\n4xVNHOk6O7a72IrX24sq6LJD7e8Yd8EVW2nOFKvS8kwLiz2NS4rNjMOeiQ/SzShn\n2+/iSePirA2TFj1AKQsxIAsCgYEA4uZsHzrMlYvQ6LExE7hVJ+/pMt5X9+nq9b9i\n8kIxLob8hqdEVqaBzgKVD+Nn4eLYNpUOg3DCYnL6kZxRhPIXD3yFvngzoScbXcxq\neq+DbzMkcGylljs/hqDY3P+p/lgMkGsqzWVp6myG6qVRvlsqwAkP1vCa/I2y/CES\nUv3vxcUCgYEAg+O7bd+Sw2u+UT+47PyebqacXk/rZLtaU29kEDFlNJyZSC0/KwF5\nvFocKcAfkegqoGfezgwlbrSfSc1T2LSGfv1rYa/f3edRdOcYCsEc89ALd1h8ClFo\n2QOFyRXFvC8dANUzTSll+q/Z/4otQt5RPAChBMGOyxkJjCgIjBOFIWkCgYEApeZw\ne80UNOvLzbdw+Mg4i0DhMy5bB4tjHlfXUuaO60QLc7DtSKXqFuTQdJODKygfipkl\nyXkyx6TmDH1NJXWU4t/bMk7VASmE2upRmG6oHTA5nnHyCIw9gIQuzDeq5asfVKDm\n5NMXxC2h4AMMZ7Bk65A0oqW9zJcWnT7ZFRFStTkCgYAKv7znEt+KAoMX3le1dlr1\n+0UxR0ulD1oYFEBbW8Sojgf4rDh5UO78caloH5X7iv5ne0eyLkaqM9PfCU+OPuGQ\nWqSXPk3YXAglZop/hDN6z8D6xPKFNOS8FWMNC1KljYp7lDYiOow9mA85G8pnUrP6\nVOr9+qqMs/O30kVCgb/YFA==\n-----END PRIVATE KEY-----\n" }, "rc": 0, "start": "2025-01-11 11:31:25.008627" } STDOUT: Request "20250111163123" removed. 
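Taken together, the certificate tasks above amount to a single self-signed request: name quadlet_demo, DNS name localhost, ca self-sign (the loop item in the "Ensure certificate requests" task). The slurped contents end up in the certificate_test_certs fact, and the on-disk files are removed again before the podman role runs. A minimal sketch of a play that drives the role the same way is shown below; the request fields are copied from the log, while the play wrapper and host pattern are assumptions.

- name: Issue a self-signed test certificate (sketch, not the test playbook itself)
  hosts: managed-node2
  vars:
    certificate_requests:
      - name: quadlet_demo   # written to /etc/pki/tls/certs/quadlet_demo.crt and .key
        dns:
          - localhost        # single SAN, matching the request in the log
        ca: self-sign        # self-signed issuance via the certmonger provider
  roles:
    - fedora.linux_system_roles.certificate

Cleanup then mirrors the log: getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt, followed by removing the certificate and key files.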
TASK [fedora.linux_system_roles.certificate : Remove files] ******************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181 Saturday 11 January 2025 11:31:25 -0500 (0:00:00.504) 0:00:11.849 ****** changed: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } changed: [managed-node2] => (item=/etc/pki/tls/private/quadlet_demo.key) => { "ansible_loop_var": "item", "changed": true, "item": "/etc/pki/tls/private/quadlet_demo.key", "path": "/etc/pki/tls/private/quadlet_demo.key", "state": "absent" } ok: [managed-node2] => (item=/etc/pki/tls/certs/quadlet_demo.crt) => { "ansible_loop_var": "item", "changed": false, "item": "/etc/pki/tls/certs/quadlet_demo.crt", "path": "/etc/pki/tls/certs/quadlet_demo.crt", "state": "absent" } TASK [Run the role] ************************************************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:62 Saturday 11 January 2025 11:31:26 -0500 (0:00:01.141) 0:00:12.990 ****** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 11 January 2025 11:31:26 -0500 (0:00:00.155) 0:00:13.145 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 11 January 2025 11:31:26 -0500 (0:00:00.040) 0:00:13.186 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 11 January 2025 11:31:26 -0500 (0:00:00.055) 0:00:13.241 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 11 January 2025 11:31:26 -0500 (0:00:00.390) 0:00:13.632 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 11 January 2025 11:31:26 -0500 (0:00:00.028) 0:00:13.660 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 11 January 2025 11:31:27 -0500 (0:00:00.372) 0:00:14.033 ****** ok: [managed-node2] => { "ansible_facts": { 
"__podman_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 11 January 2025 11:31:27 -0500 (0:00:00.025) 0:00:14.058 ****** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 11 January 2025 11:31:27 -0500 (0:00:00.047) 0:00:14.106 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 11 January 2025 11:31:28 -0500 (0:00:01.180) 0:00:15.287 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 11 January 2025 11:31:28 -0500 (0:00:00.071) 0:00:15.359 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 11 January 2025 11:31:28 -0500 (0:00:00.079) 0:00:15.438 ****** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 11 January 2025 11:31:28 -0500 (0:00:00.071) 0:00:15.510 ****** skipping: [managed-node2] => { "changed": false, "false_condition": 
"__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 11 January 2025 11:31:28 -0500 (0:00:00.054) 0:00:15.564 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 11 January 2025 11:31:28 -0500 (0:00:00.054) 0:00:15.618 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.025009", "end": "2025-01-11 11:31:29.203872", "rc": 0, "start": "2025-01-11 11:31:29.178863" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 11 January 2025 11:31:29 -0500 (0:00:00.421) 0:00:16.040 ****** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 11 January 2025 11:31:29 -0500 (0:00:00.035) 0:00:16.075 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 11 January 2025 11:31:29 -0500 (0:00:00.036) 0:00:16.112 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 11 January 2025 11:31:29 -0500 (0:00:00.102) 0:00:16.214 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 11 January 2025 11:31:29 -0500 (0:00:00.136) 0:00:16.351 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 11 January 2025 11:31:29 -0500 (0:00:00.047) 0:00:16.399 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 11 January 2025 11:31:29 -0500 (0:00:00.075) 0:00:16.474 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:31:29 -0500 (0:00:00.093) 0:00:16.567 ****** ok: [managed-node2] => { "ansible_facts": { "getent_passwd": { "root": [ "x", "0", "0", "Super User", "/root", "/bin/bash" ] } }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.484) 0:00:17.052 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.031) 0:00:17.083 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.037) 0:00:17.120 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.381) 0:00:17.501 ****** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.031) 0:00:17.533 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.029) 0:00:17.562 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.029) 0:00:17.592 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.028) 0:00:17.620 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.031) 0:00:17.651 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.028) 0:00:17.680 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.028) 0:00:17.709 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 11 January 2025 11:31:30 -0500 (0:00:00.029) 0:00:17.738 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": 
"/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.053) 0:00:17.791 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.052) 0:00:17.844 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.033) 0:00:17.877 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.083) 0:00:17.960 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.061) 0:00:18.022 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.034) 0:00:18.057 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.034) 0:00:18.091 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.061) 0:00:18.153 ****** skipping: 
[managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.031) 0:00:18.185 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.027) 0:00:18.212 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.055) 0:00:18.267 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.028) 0:00:18.296 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.029) 0:00:18.325 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.049) 0:00:18.375 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.029) 0:00:18.404 ****** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.097) 0:00:18.501 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.051) 0:00:18.553 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 11 January 2025 11:31:31 -0500 (0:00:00.040) 0:00:18.593 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 11 January 2025 11:31:32 -0500 (0:00:00.469) 0:00:19.062 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_ostree": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 11 January 2025 11:31:32 -0500 (0:00:00.055) 0:00:19.118 ****** ok: [managed-node2] => { "changed": false, "stat": { "exists": false } } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 11 January 2025 11:31:32 -0500 (0:00:00.395) 0:00:19.514 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_is_transactional": false }, "changed": false } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 11 January 2025 11:31:32 -0500 (0:00:00.051) 0:00:19.566 ****** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 11 January 2025 11:31:33 -0500 (0:00:00.818) 0:00:20.384 ****** skipping: [managed-node2] => { "false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 11 January 2025 11:31:33 -0500 (0:00:00.065) 0:00:20.450 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 11 January 2025 11:31:33 -0500 (0:00:00.076) 0:00:20.526 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 11 January 2025 11:31:33 -0500 (0:00:00.047) 0:00:20.573 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 11 January 2025 11:31:33 -0500 (0:00:00.044) 0:00:20.618 ****** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 11 January 2025 11:31:33 -0500 (0:00:00.055) 0:00:20.673 ****** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-11 11:29:10 EST", "ActiveEnterTimestampMonotonic": "271605433", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service dbus-broker.service sysinit.target system.slice basic.target dbus.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-11 11:29:10 EST", "AssertTimestampMonotonic": "271333792", "Before": "network-pre.target multi-user.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "477685000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-11 11:29:10 EST", "ConditionTimestampMonotonic": "271333789", 
"ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service shutdown.target ebtables.service ip6tables.service iptables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4516", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-11 11:29:10 EST", "ExecMainHandoffTimestampMonotonic": "271363582", "ExecMainPID": "10862", "ExecMainStartTimestamp": "Sat 2025-01-11 11:29:10 EST", "ExecMainStartTimestampMonotonic": "271336631", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-11 11:29:10 EST", "InactiveExitTimestampMonotonic": "271337679", "InvocationID": "f2f268445be048bc90d6f558b31f85a8", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", 
"LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10862", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3214090240", "MemoryCurrent": "33046528", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35106816", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", 
"StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-11 11:31:22 EST", "StateChangeTimestampMonotonic": "403358927", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 11 January 2025 11:31:34 -0500 (0:00:00.580) 0:00:21.254 ****** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-11 11:29:10 EST", "ActiveEnterTimestampMonotonic": "271605433", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service dbus-broker.service sysinit.target system.slice basic.target dbus.socket", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-11 11:29:10 EST", "AssertTimestampMonotonic": "271333792", "Before": "network-pre.target multi-user.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "477685000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf 
cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-11 11:29:10 EST", "ConditionTimestampMonotonic": "271333789", "ConfigurationDirectoryMode": "0755", "Conflicts": "ipset.service shutdown.target ebtables.service ip6tables.service iptables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4516", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-11 11:29:10 EST", "ExecMainHandoffTimestampMonotonic": "271363582", "ExecMainPID": "10862", "ExecMainStartTimestamp": "Sat 2025-01-11 11:29:10 EST", "ExecMainStartTimestampMonotonic": "271336631", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-11 11:29:10 EST", "InactiveExitTimestampMonotonic": "271337679", "InvocationID": "f2f268445be048bc90d6f558b31f85a8", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", 
"LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10862", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3214274560", "MemoryCurrent": "33046528", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35106816", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "system.slice dbus.socket sysinit.target", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", 
"RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-11 11:31:22 EST", "StateChangeTimestampMonotonic": "403358927", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 11 January 2025 11:31:35 -0500 (0:00:00.542) 0:00:21.797 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 11 January 2025 11:31:35 -0500 (0:00:00.040) 0:00:21.837 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 11 January 2025 11:31:35 -0500 (0:00:00.030) 0:00:21.867 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 11 January 2025 11:31:35 -0500 (0:00:00.026) 0:00:21.894 ****** changed: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "8000/tcp", "state": "enabled" } } 
changed: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": true, "ansible_loop_var": "item", "changed": true, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 11 January 2025 11:31:36 -0500 (0:00:01.163) 0:00:23.058 ****** skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.048) 0:00:23.106 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.030) 0:00:23.137 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.028) 0:00:23.166 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums after] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.030) 0:00:23.196 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.026) 0:00:23.223 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.059) 0:00:23.282 ****** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | 
bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.043) 0:00:23.326 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.029) 0:00:23.355 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.029) 0:00:23.385 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.024) 0:00:23.410 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.025) 0:00:23.435 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.171) 0:00:23.607 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.058) 0:00:23.665 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:31:36 -0500 (0:00:00.076) 0:00:23.742 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.035) 0:00:23.777 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.036) 0:00:23.814 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.041) 0:00:23.856 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.030) 0:00:23.887 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.028) 0:00:23.916 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.029) 0:00:23.945 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.028) 0:00:23.973 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.032) 0:00:24.006 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.030) 0:00:24.036 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.029) 0:00:24.066 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.059) 0:00:24.126 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.030) 0:00:24.156 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.037) 0:00:24.194 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.059) 0:00:24.253 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.041) 0:00:24.294 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.045) 0:00:24.339 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 
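[editorial sketch] The linger and XDG_RUNTIME_DIR handling around this point is skipped because this pass manages the secret as root (__podman_rootless is false). For contrast, a minimal sketch of what enabling linger amounts to when a rootless user is managed; the user name and the creates guard are illustrative assumptions, not values from this run:

    - name: Enable lingering so the user's services outlive the login session (illustrative)
      become: true
      ansible.builtin.command: loginctl enable-linger podman_user
      args:
        # Marker file systemd creates once lingering is enabled; makes the task idempotent
        creates: /var/lib/systemd/linger/podman_user
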
Saturday 11 January 2025 11:31:37 -0500 (0:00:00.045) 0:00:24.384 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 11 January 2025 11:31:37 -0500 (0:00:00.044) 0:00:24.428 ****** [WARNING]: Using a variable for a task's 'args' is unsafe in some situations (see https://docs.ansible.com/ansible/devel/reference_appendices/faq.html#argsplat- unsafe) changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.742) 0:00:25.171 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.050) 0:00:25.221 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.070) 0:00:25.292 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.040) 0:00:25.332 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.039) 0:00:25.371 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.044) 0:00:25.416 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 
11 January 2025 11:31:38 -0500 (0:00:00.028) 0:00:25.445 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.029) 0:00:25.475 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.030) 0:00:25.505 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.030) 0:00:25.536 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.029) 0:00:25.565 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.077) 0:00:25.642 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.043) 0:00:25.685 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:31:38 -0500 (0:00:00.035) 0:00:25.720 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.035) 0:00:25.756 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, 
"__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.078) 0:00:25.834 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.055) 0:00:25.889 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.028) 0:00:25.918 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.029) 0:00:25.948 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.026) 0:00:25.974 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.027) 0:00:26.002 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.568) 0:00:26.571 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.055) 0:00:26.626 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:31:39 -0500 (0:00:00.090) 0:00:26.717 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.052) 0:00:26.769 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.062) 0:00:26.832 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.100) 0:00:26.933 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.039) 0:00:26.972 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.036) 0:00:27.008 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.038) 0:00:27.047 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.033) 0:00:27.081 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 
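
The repeated "skipping" results around this point are expected: the subordinate-ID lookups in handle_user_group.yml only matter for rootless operation. In this run they are skipped either because subid checking is disabled for the item ("__podman_check_subids | d(true)" evaluates false) or, in later iterations, because the target user is root ("__podman_user not in [\"root\", \"0\"]"). A minimal, illustrative sketch of that guard pattern follows; the two when: conditions are copied from the log, while the wrapper play and the bare getsubids call are assumptions added for demonstration and are not the role's actual task file.

# Illustrative sketch only, not the fedora.linux_system_roles.podman task file.
- name: Demonstrate the rootless subid guard seen in the log
  hosts: managed-node2
  gather_facts: false
  vars:
    __podman_user: root          # as resolved by the role in this run
    __podman_check_subids: true  # the role may set this false per item
  tasks:
    - name: Check with getsubids for user subuids (skipped for root)
      ansible.builtin.command: getsubids {{ __podman_user }}
      register: __podman_subuid_info
      changed_when: false
      when:
        - __podman_check_subids | d(true)
        - __podman_user not in ["root", "0"]
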
Saturday 11 January 2025 11:31:40 -0500 (0:00:00.065) 0:00:27.147 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.030) 0:00:27.178 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.031) 0:00:27.209 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.029) 0:00:27.239 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.037) 0:00:27.277 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.057) 0:00:27.334 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.096) 0:00:27.431 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.050) 0:00:27.482 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.049) 0:00:27.531 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.049) 0:00:27.580 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 11 January 2025 11:31:40 -0500 (0:00:00.032) 0:00:27.613 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.550) 0:00:28.164 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.038) 0:00:28.202 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.152) 0:00:28.355 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.045) 0:00:28.401 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": 
true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.038) 0:00:28.439 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.037) 0:00:28.477 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.054) 0:00:28.531 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.100) 0:00:28.632 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.034) 0:00:28.666 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.033) 0:00:28.700 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:31:41 -0500 (0:00:00.042) 0:00:28.742 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", 
"mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.409) 0:00:29.152 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.032) 0:00:29.185 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.030) 0:00:29.215 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.030) 0:00:29.246 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.032) 0:00:29.278 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.034) 0:00:29.313 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.034) 0:00:29.347 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 
Saturday 11 January 2025 11:31:42 -0500 (0:00:00.035) 0:00:29.382 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.033) 0:00:29.416 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.073) 0:00:29.490 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.054) 0:00:29.544 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.037) 0:00:29.581 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.093) 0:00:29.675 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:31:42 -0500 (0:00:00.046) 0:00:29.722 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.033) 0:00:29.755 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.067) 0:00:29.823 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.089) 0:00:29.913 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.029) 0:00:29.943 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.027) 0:00:29.971 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.028) 0:00:29.999 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.029) 0:00:30.029 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.047) 0:00:30.076 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 11 January 2025 11:31:43 -0500 (0:00:00.438) 0:00:30.515 ****** changed: [managed-node2] => { "changed": true, "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "dest": "/etc/containers/systemd/quadlet-demo.network", "gid": 0, "group": "root", "md5sum": "061f3cf318cbd8ab5794bb1173831fb8", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 74, "src": 
"/root/.ansible/tmp/ansible-tmp-1736613103.8228424-13931-69007231641131/.source.network", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 11 January 2025 11:31:44 -0500 (0:00:00.897) 0:00:31.412 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 11 January 2025 11:31:44 -0500 (0:00:00.048) 0:00:31.461 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 11 January 2025 11:31:44 -0500 (0:00:00.037) 0:00:31.498 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 11 January 2025 11:31:45 -0500 (0:00:00.800) 0:00:32.298 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-network.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "system.slice network-online.target systemd-journald.socket sysinit.target -.mount basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", 
"DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": 
"auto", "MemoryAccounting": "yes", "MemoryAvailable": "3211874304", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "system.slice sysinit.target -.mount", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", 
"SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.701) 0:00:32.999 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.058) 0:00:33.057 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.073) 0:00:33.131 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.044) 0:00:33.175 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.059) 0:00:33.235 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.061) 0:00:33.297 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] 
***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.063) 0:00:33.360 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.032) 0:00:33.393 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.071) 0:00:33.465 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:31:46 -0500 (0:00:00.042) 0:00:33.507 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.405) 0:00:33.912 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.031) 0:00:33.944 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.033) 0:00:33.977 ****** skipping: 
[managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.033) 0:00:34.010 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.032) 0:00:34.043 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.031) 0:00:34.075 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.032) 0:00:34.108 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.032) 0:00:34.140 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.031) 0:00:34.172 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.051) 0:00:34.223 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 
January 2025 11:31:47 -0500 (0:00:00.035) 0:00:34.259 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.054) 0:00:34.314 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.076) 0:00:34.390 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.039) 0:00:34.430 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.029) 0:00:34.460 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.063) 0:00:34.523 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.049) 0:00:34.573 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.028) 0:00:34.602 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.064) 0:00:34.666 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.029) 0:00:34.696 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 11 January 2025 11:31:47 -0500 (0:00:00.027) 0:00:34.724 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 11 January 2025 11:31:48 -0500 (0:00:00.028) 0:00:34.752 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 34, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 11 January 2025 11:31:48 -0500 (0:00:00.398) 0:00:35.150 ****** changed: [managed-node2] => { "changed": true, "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "dest": "/etc/containers/systemd/quadlet-demo-mysql.volume", "gid": 0, "group": "root", "md5sum": "5ddd03a022aeb4502d9bc8ce436b4233", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 9, "src": "/root/.ansible/tmp/ansible-tmp-1736613108.4490383-14098-270882481689807/.source.volume", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 11 January 2025 11:31:49 -0500 (0:00:00.735) 0:00:35.886 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 11 January 2025 11:31:49 -0500 (0:00:00.033) 0:00:35.920 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 11 January 2025 11:31:49 -0500 (0:00:00.030) 0:00:35.950 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 11 January 2025 11:31:49 -0500 (0:00:00.748) 0:00:36.698 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql-volume.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "-.mount basic.target systemd-journald.socket system.slice network-online.target sysinit.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": 
"running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3196256256", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", 
"ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "-.mount system.slice sysinit.target", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.631) 0:00:37.330 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.033) 0:00:37.364 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": 
"", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.111) 0:00:37.475 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.040) 0:00:37.515 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.031) 0:00:37.547 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.047) 0:00:37.594 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.055) 0:00:37.650 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.035) 0:00:37.685 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:31:50 -0500 (0:00:00.034) 0:00:37.719 ****** ok: [managed-node2] => { 
"ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.043) 0:00:37.763 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.395) 0:00:38.159 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.033) 0:00:38.192 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.076) 0:00:38.268 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.033) 0:00:38.302 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.032) 0:00:38.335 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.032) 0:00:38.367 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.032) 0:00:38.400 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.031) 0:00:38.431 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.048) 0:00:38.479 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.063) 0:00:38.543 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.034) 0:00:38.577 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.031) 0:00:38.608 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.079) 0:00:38.687 ****** ok: [managed-node2] => { "censored": "the output 
has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:31:51 -0500 (0:00:00.038) 0:00:38.726 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:31:52 -0500 (0:00:00.030) 0:00:38.756 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 11 January 2025 11:31:52 -0500 (0:00:00.063) 0:00:38.820 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:31:52 -0500 (0:00:00.050) 0:00:38.870 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:31:52 -0500 (0:00:00.031) 0:00:38.902 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:31:52 -0500 (0:00:00.029) 0:00:38.931 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 11 January 2025 11:31:52 -0500 (0:00:00.030) 0:00:38.962 ****** changed: [managed-node2] => (item=/tmp/quadlet_demo) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/quadlet_demo", "mode": "0777", "owner": "root", "path": "/tmp/quadlet_demo", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 11 January 2025 11:31:52 -0500 (0:00:00.410) 0:00:39.372 ****** changed: [managed-node2] => 
(item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 11 January 2025 11:31:59 -0500 (0:00:06.680) 0:00:46.053 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 67, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 11 January 2025 11:31:59 -0500 (0:00:00.385) 0:00:46.438 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 11 January 2025 11:31:59 -0500 (0:00:00.032) 0:00:46.471 ****** changed: [managed-node2] => { "changed": true, "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "dest": "/etc/containers/systemd/quadlet-demo-mysql.container", "gid": 0, "group": "root", "md5sum": "341b473056d2a5dfa35970b0d2e23a5d", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 363, "src": "/root/.ansible/tmp/ansible-tmp-1736613119.7690783-14333-16171380749252/.source.container", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 11 January 2025 11:32:00 -0500 (0:00:00.702) 0:00:47.173 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 11 January 2025 11:32:00 -0500 (0:00:00.034) 0:00:47.208 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 11 January 2025 11:32:01 -0500 (0:00:00.742) 0:00:47.951 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo-mysql.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "tmp.mount -.mount network-online.target quadlet-demo-mysql-volume.service quadlet-demo-network.service sysinit.target systemd-journald.socket system.slice 
basic.target", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "multi-user.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman 
; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3063742464", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", 
"MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-mysql-volume.service system.slice quadlet-demo-network.service -.mount sysinit.target", "RequiresMountsFor": "/run/containers /tmp/quadlet_demo", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", 
"TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.917) 0:00:48.868 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.094) 0:00:48.962 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.074) 0:00:49.036 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:32:02 -0500 
(0:00:00.048) 0:00:49.085 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.040) 0:00:49.125 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.058) 0:00:49.183 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.059) 0:00:49.243 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.037) 0:00:49.280 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.036) 0:00:49.316 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:32:02 -0500 (0:00:00.046) 0:00:49.363 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with 
getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.430) 0:00:49.793 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.054) 0:00:49.848 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.053) 0:00:49.902 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.051) 0:00:49.953 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.099) 0:00:50.055 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.117) 0:00:50.173 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.052) 0:00:50.226 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.036) 0:00:50.263 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.036) 0:00:50.299 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.064) 0:00:50.363 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.031) 0:00:50.395 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.030) 0:00:50.425 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.074) 0:00:50.500 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.038) 0:00:50.539 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.032) 0:00:50.572 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.099) 0:00:50.671 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] 
************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:32:03 -0500 (0:00:00.059) 0:00:50.731 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:32:04 -0500 (0:00:00.036) 0:00:50.767 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:32:04 -0500 (0:00:00.033) 0:00:50.801 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 11 January 2025 11:32:04 -0500 (0:00:00.028) 0:00:50.829 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 11 January 2025 11:32:04 -0500 (0:00:00.026) 0:00:50.856 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 11 January 2025 11:32:04 -0500 (0:00:00.028) 0:00:50.885 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 103, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 11 January 2025 11:32:04 -0500 (0:00:00.447) 0:00:51.332 ****** changed: [managed-node2] => { "changed": true, "checksum": "d681c7d56f912150d041873e880818b22a90c188", "dest": "/etc/containers/systemd/envoy-proxy-configmap.yml", "gid": 0, "group": "root", "md5sum": "aec75d972c231aac004e1338934544cf", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 2102, "src": "/root/.ansible/tmp/ansible-tmp-1736613124.6292498-14522-101146869489364/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 11 January 2025 11:32:05 -0500 (0:00:00.744) 0:00:52.077 ****** 
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 11 January 2025 11:32:05 -0500 (0:00:00.169) 0:00:52.246 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 11 January 2025 11:32:05 -0500 (0:00:00.053) 0:00:52.299 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.802) 0:00:53.101 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.057) 0:00:53.160 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.080) 0:00:53.240 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing 
hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.157) 0:00:53.397 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.063) 0:00:53.461 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.051) 0:00:53.512 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.076) 0:00:53.589 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.094) 0:00:53.683 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:32:06 -0500 (0:00:00.056) 0:00:53.739 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.048) 0:00:53.788 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.049) 
0:00:53.838 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.378) 0:00:54.217 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.037) 0:00:54.254 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.036) 0:00:54.290 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.053) 0:00:54.344 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.042) 0:00:54.386 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.030) 0:00:54.417 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.031) 0:00:54.448 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.030) 0:00:54.479 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.067) 0:00:54.547 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.052) 0:00:54.599 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.035) 0:00:54.635 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_kube_yamls_raw | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:32:07 -0500 (0:00:00.035) 0:00:54.670 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.120) 0:00:54.791 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.056) 0:00:54.847 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was 
False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.045) 0:00:54.893 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.101) 0:00:54.994 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.082) 0:00:55.077 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.043) 0:00:55.120 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.047) 0:00:55.167 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.048) 0:00:55.216 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.045) 0:00:55.262 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 11 January 2025 11:32:08 -0500 (0:00:00.044) 0:00:55.306 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 136, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 11 January 2025 11:32:09 -0500 (0:00:00.449) 0:00:55.756 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_quadlet_file_src | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 11 January 2025 11:32:09 -0500 (0:00:00.037) 0:00:55.793 ****** changed: [managed-node2] => { "changed": true, "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "dest": "/etc/containers/systemd/quadlet-demo.yml", "gid": 0, "group": "root", "md5sum": "fd890594adfc24339cb9cdc5e7b19a66", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 1605, "src": "/root/.ansible/tmp/ansible-tmp-1736613129.091449-14710-252865355782244/.source.yml", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 11 January 2025 11:32:09 -0500 (0:00:00.701) 0:00:56.495 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_content is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 11 January 2025 11:32:09 -0500 (0:00:00.028) 0:00:56.523 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 11 January 2025 11:32:10 -0500 (0:00:00.772) 0:00:57.296 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 11 January 2025 11:32:10 -0500 (0:00:00.039) 0:00:57.336 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:32:10 -0500 (0:00:00.070) 0:00:57.406 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin 
port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false }
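For readability, the escaped __podman_quadlet_str value in the result above unescapes to the following [Kube] quadlet unit (a plain-text rendering reconstructed from the logged string; the authoritative copy is the quadlet-demo.kube file installed under /etc/containers/systemd later in this play):

[Install]
WantedBy=default.target

[Unit]
Requires=quadlet-demo-mysql.service
After=quadlet-demo-mysql.service

[Kube]
# Point to the yaml file in the same directory
Yaml=quadlet-demo.yml
# Use the quadlet-demo network
Network=quadlet-demo.network
# Publish the envoy proxy data port
PublishPort=8000:8080
# Publish the envoy proxy admin port
PublishPort=9000:9901
# Use the envoy proxy config map in the same directory
ConfigMap=envoy-proxy-configmap.yml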
TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25
Saturday 11 January 2025 11:32:10 -0500 (0:00:00.042) 0:00:57.449 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "created", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false }
TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] *****
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35
Saturday 11 January 2025 11:32:10 -0500 (0:00:00.056) 0:00:57.506 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48
Saturday 11 January 2025 11:32:10 -0500 (0:00:00.046) 0:00:57.552 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false }
TASK [fedora.linux_system_roles.podman : Check user and group information] *****
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57
Saturday 11 January 2025 11:32:10 -0500 (0:00:00.077) 0:00:57.629 ******
included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2
TASK [fedora.linux_system_roles.podman : Get user information] *****************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2
Saturday 11 January 2025 11:32:10 -0500 (0:00:00.069) 0:00:57.698 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user does not exist] **********
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9
Saturday 11 January 2025 11:32:10 -0500 (0:00:00.042) 0:00:57.741 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set group for podman user] ************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16
Saturday 11 January 2025 11:32:11 -0500 (0:00:00.038) 0:00:57.780 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false }
TASK [fedora.linux_system_roles.podman : See if getsubids exists] **************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31
Saturday 11 January 2025 11:32:11 -0500 (0:00:00.040) 0:00:57.820 ******
ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } }
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42
Saturday 11 January 2025 11:32:11 -0500 (0:00:00.379) 0:00:58.200 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47
Saturday 11 January 2025 11:32:11 -0500 (0:00:00.031) 0:00:58.231 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52
Saturday 11 January 2025 11:32:11 -0500 (0:00:00.033) 0:00:58.264 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Get subuid file] **********************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65
Saturday 11 January 2025 11:32:11 -0500 (0:00:00.033) 0:00:58.298 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Get subgid file] **********************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70
Saturday 11 January 2025 11:32:11 -0500 (0:00:00.033) 0:00:58.331 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ******
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75
Saturday 11 January 2025 11:32:11 -0500 (0:00:00.034) 0:00:58.366 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" }
TASK [fedora.linux_system_roles.podman : Fail if user not in
subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:32:11 -0500 (0:00:00.051) 0:00:58.417 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:32:11 -0500 (0:00:00.053) 0:00:58.471 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:32:11 -0500 (0:00:00.054) 0:00:58.525 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:32:11 -0500 (0:00:00.091) 0:00:58.617 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:32:11 -0500 (0:00:00.056) 0:00:58.673 ****** ok: [managed-node2] => { "changed": false, "content": 
"LS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBlcnNpc3RlbnRWb2x1bWVDbGFpbQptZXRhZGF0YToKICBuYW1lOiB3cC1wdi1jbGFpbQogIGxhYmVsczoKICAgIGFwcDogd29yZHByZXNzCnNwZWM6CiAgYWNjZXNzTW9kZXM6CiAgLSBSZWFkV3JpdGVPbmNlCiAgcmVzb3VyY2VzOgogICAgcmVxdWVzdHM6CiAgICAgIHN0b3JhZ2U6IDIwR2kKLS0tCmFwaVZlcnNpb246IHYxCmtpbmQ6IFBvZAptZXRhZGF0YToKICBuYW1lOiBxdWFkbGV0LWRlbW8Kc3BlYzoKICBjb250YWluZXJzOgogIC0gbmFtZTogd29yZHByZXNzCiAgICBpbWFnZTogcXVheS5pby9saW51eC1zeXN0ZW0tcm9sZXMvd29yZHByZXNzOjQuOC1hcGFjaGUKICAgIGVudjoKICAgIC0gbmFtZTogV09SRFBSRVNTX0RCX0hPU1QKICAgICAgdmFsdWU6IHF1YWRsZXQtZGVtby1teXNxbAogICAgLSBuYW1lOiBXT1JEUFJFU1NfREJfUEFTU1dPUkQKICAgICAgdmFsdWVGcm9tOgogICAgICAgIHNlY3JldEtleVJlZjoKICAgICAgICAgIG5hbWU6IG15c3FsLXJvb3QtcGFzc3dvcmQta3ViZQogICAgICAgICAga2V5OiBwYXNzd29yZAogICAgdm9sdW1lTW91bnRzOgogICAgLSBuYW1lOiB3b3JkcHJlc3MtcGVyc2lzdGVudC1zdG9yYWdlCiAgICAgIG1vdW50UGF0aDogL3Zhci93d3cvaHRtbAogICAgcmVzb3VyY2VzOgogICAgICByZXF1ZXN0czoKICAgICAgICBtZW1vcnk6ICI2NE1pIgogICAgICAgIGNwdTogIjI1MG0iCiAgICAgIGxpbWl0czoKICAgICAgICBtZW1vcnk6ICIxMjhNaSIKICAgICAgICBjcHU6ICI1MDBtIgogIC0gbmFtZTogZW52b3kKICAgIGltYWdlOiBxdWF5LmlvL2xpbnV4LXN5c3RlbS1yb2xlcy9lbnZveXByb3h5OnYxLjI1LjAKICAgIHZvbHVtZU1vdW50czoKICAgIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgICBtb3VudFBhdGg6IC9ldGMvZW52b3kKICAgIC0gbmFtZTogY2VydGlmaWNhdGVzCiAgICAgIG1vdW50UGF0aDogL2V0Yy9lbnZveS1jZXJ0aWZpY2F0ZXMKICAgIGVudjoKICAgIC0gbmFtZTogRU5WT1lfVUlECiAgICAgIHZhbHVlOiAiMCIKICAgIHJlc291cmNlczoKICAgICAgcmVxdWVzdHM6CiAgICAgICAgbWVtb3J5OiAiNjRNaSIKICAgICAgICBjcHU6ICIyNTBtIgogICAgICBsaW1pdHM6CiAgICAgICAgbWVtb3J5OiAiMTI4TWkiCiAgICAgICAgY3B1OiAiNTAwbSIKICB2b2x1bWVzOgogIC0gbmFtZTogY29uZmlnLXZvbHVtZQogICAgY29uZmlnTWFwOgogICAgICBuYW1lOiBlbnZveS1wcm94eS1jb25maWcKICAtIG5hbWU6IGNlcnRpZmljYXRlcwogICAgc2VjcmV0OgogICAgICBzZWNyZXROYW1lOiBlbnZveS1jZXJ0aWZpY2F0ZXMKICAtIG5hbWU6IHdvcmRwcmVzcy1wZXJzaXN0ZW50LXN0b3JhZ2UKICAgIHBlcnNpc3RlbnRWb2x1bWVDbGFpbToKICAgICAgY2xhaW1OYW1lOiB3cC1wdi1jbGFpbQogIC0gbmFtZTogd3d3ICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMwogIC0gbmFtZTogY3JlYXRlICAjIG5vdCB1c2VkIC0gZm9yIHRlc3RpbmcgaG9zdHBhdGgKICAgIGhvc3RQYXRoOgogICAgICBwYXRoOiAvdG1wL2h0dHBkMy1jcmVhdGUK", "encoding": "base64", "source": "/etc/containers/systemd/quadlet-demo.yml" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.432) 0:00:59.106 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/wordpress:4.8-apache", "quay.io/linux-system-roles/envoyproxy:v1.25.0" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [ "/tmp/httpd3", "/tmp/httpd3-create" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.209) 0:00:59.315 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.049) 0:00:59.364 ****** skipping: [managed-node2] 
=> { "changed": false, "false_condition": "__podman_state == \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.036) 0:00:59.400 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:2 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.066) 0:00:59.467 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.053) 0:00:59.521 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.030) 0:00:59.551 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.033) 0:00:59.585 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Create host directories] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:7 Saturday 11 January 2025 11:32:12 -0500 (0:00:00.037) 0:00:59.623 ****** changed: [managed-node2] => (item=/tmp/httpd3) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3", "mode": "0755", "owner": "root", "path": "/tmp/httpd3", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } changed: [managed-node2] => (item=/tmp/httpd3-create) => { "ansible_loop_var": "item", "changed": true, "gid": 0, "group": "root", "item": "/tmp/httpd3-create", "mode": "0755", "owner": "root", "path": "/tmp/httpd3-create", "secontext": "unconfined_u:object_r:user_tmp_t:s0", "size": 6, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure container images are present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18 Saturday 11 January 2025 11:32:13 -0500 (0:00:00.798) 0:01:00.421 ****** changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was 
specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "attempts": 1, "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Ensure the quadlet directory is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:39 Saturday 11 January 2025 11:32:31 -0500 (0:00:17.395) 0:01:17.817 ****** ok: [managed-node2] => { "changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/containers/systemd", "secontext": "system_u:object_r:etc_t:s0", "size": 160, "state": "directory", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is copied] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:48 Saturday 11 January 2025 11:32:31 -0500 (0:00:00.405) 0:01:18.223 ****** changed: [managed-node2] => { "changed": true, "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "dest": "/etc/containers/systemd/quadlet-demo.kube", "gid": 0, "group": "root", "md5sum": "da53c88f92b68b0487aa209f795b6bb3", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 456, "src": "/root/.ansible/tmp/ansible-tmp-1736613151.519664-15332-169452422398586/.source.kube", "state": "file", "uid": 0 } TASK [fedora.linux_system_roles.podman : Ensure quadlet file content is present] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:58 Saturday 11 January 2025 11:32:32 -0500 (0:00:00.697) 0:01:18.920 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure quadlet file is present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:70 Saturday 11 January 2025 11:32:32 -0500 (0:00:00.034) 0:01:18.955 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_copy_file is skipped", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Reload systemctl] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:82 Saturday 11 January 2025 11:32:32 -0500 (0:00:00.036) 0:01:18.992 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Start service] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110 Saturday 11 January 2025 11:32:32 -0500 (0:00:00.737) 0:01:19.729 ****** changed: [managed-node2] => { "changed": true, "name": "quadlet-demo.service", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestampMonotonic": "0", "ActiveExitTimestampMonotonic": "0", "ActiveState": "inactive", "After": "sysinit.target quadlet-demo-network.service system.slice basic.target -.mount systemd-journald.socket 
network-online.target quadlet-demo-mysql.service", "AllowIsolate": "no", "AssertResult": "no", "AssertTimestampMonotonic": "0", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "[not set]", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "no", "ConditionTimestampMonotonic": "0", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "0", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "0", "ExecMainStartTimestampMonotonic": "0", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": 
"root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestampMonotonic": "0", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3021348864", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "[not set]", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "[not set]", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", 
"Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-network.service sysinit.target quadlet-demo-mysql.service -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestampMonotonic": "0", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "dead", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "infinity" } } TASK [fedora.linux_system_roles.podman : Restart service] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:125 Saturday 11 January 2025 11:32:37 -0500 (0:00:04.486) 0:01:24.216 ****** skipping: [managed-node2] => { "changed": 
false, "false_condition": "not __podman_service_started is changed", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 11 January 2025 11:32:37 -0500 (0:00:00.032) 0:01:24.248 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 11 January 2025 11:32:37 -0500 (0:00:00.027) 0:01:24.276 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 11 January 2025 11:32:37 -0500 (0:00:00.025) 0:01:24.301 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Check quadlet files] ***************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:96 Saturday 11 January 2025 11:32:37 -0500 (0:00:00.082) 0:01:24.384 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/containers/systemd" ], "delta": "0:00:00.004783", "end": "2025-01-11 11:32:37.960045", "rc": 0, "start": "2025-01-11 11:32:37.955262" } STDOUT: total 24 drwxr-xr-x. 9 root root 178 Jan 11 11:29 ../ -rw-r--r--. 1 root root 74 Jan 11 11:31 quadlet-demo.network -rw-r--r--. 1 root root 9 Jan 11 11:31 quadlet-demo-mysql.volume -rw-r--r--. 1 root root 363 Jan 11 11:32 quadlet-demo-mysql.container -rw-r--r--. 1 root root 2102 Jan 11 11:32 envoy-proxy-configmap.yml -rw-r--r--. 1 root root 1605 Jan 11 11:32 quadlet-demo.yml -rw-r--r--. 1 root root 456 Jan 11 11:32 quadlet-demo.kube drwxr-xr-x. 
2 root root 185 Jan 11 11:32 ./
TASK [Check containers] ********************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:100
Saturday 11 January 2025 11:32:38 -0500 (0:00:00.411) 0:01:24.796 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.034867", "end": "2025-01-11 11:32:38.408051", "failed_when_result": false, "rc": 0, "start": "2025-01-11 11:32:38.373184" }
STDOUT:
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
TASK [Check volumes] ***********************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:105
Saturday 11 January 2025 11:32:38 -0500 (0:00:00.433) 0:01:25.229 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls" ], "delta": "0:00:00.034428", "end": "2025-01-11 11:32:38.814124", "failed_when_result": false, "rc": 0, "start": "2025-01-11 11:32:38.779696" }
STDOUT:
DRIVER VOLUME NAME
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-certificates
local envoy-proxy-config
TASK [Check pods] **************************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:110
Saturday 11 January 2025 11:32:38 -0500 (0:00:00.403) 0:01:25.633 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.030063", "end": "2025-01-11 11:32:39.226265", "failed_when_result": false, "rc": 0, "start": "2025-01-11 11:32:39.196202" }
STDOUT:
POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS
TASK [Check systemd] ***********************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:115
Saturday 11 January 2025 11:32:39 -0500 (0:00:00.412) 0:01:26.045 ******
ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units | grep quadlet", "delta": "0:00:00.013529", "end": "2025-01-11 11:32:39.613809", "failed_when_result": false, "rc": 0, "start": "2025-01-11 11:32:39.600280" }
STDOUT:
quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service
● quadlet-demo-mysql.service loaded failed failed quadlet-demo-mysql.service
quadlet-demo-network.service loaded active exited quadlet-demo-network.service
● quadlet-demo.service loaded failed failed quadlet-demo.service
TASK [Check web] ***************************************************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
Saturday 11 January 2025 11:32:39 -0500 (0:00:00.385) 0:01:26.430 ******
FAILED - RETRYING: [managed-node2]: Check web (6 retries left).
FAILED - RETRYING: [managed-node2]: Check web (5 retries left).
FAILED - RETRYING: [managed-node2]: Check web (4 retries left).
FAILED - RETRYING: [managed-node2]: Check web (3 retries left).
FAILED - RETRYING: [managed-node2]: Check web (2 retries left).
FAILED - RETRYING: [managed-node2]: Check web (1 retries left).
fatal: [managed-node2]: FAILED!
=> { "attempts": 6, "changed": false, "dest": "/run/out", "elapsed": 0, "url": "https://localhost:8000" } MSG: Request failed: TASK [Dump journal] ************************************************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:142 Saturday 11 January 2025 11:33:12 -0500 (0:00:33.151) 0:01:59.582 ****** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.030105", "end": "2025-01-11 11:33:13.166571", "failed_when_result": true, "rc": 0, "start": "2025-01-11 11:33:13.136466" } STDOUT: Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 51 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 51 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 52 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 52 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 53 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 53 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 54 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 54 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 55 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 55 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 56 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 56 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 57 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 57 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 58 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 58 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: Cannot change IRQ 59 affinity: Permission denied Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com irqbalance[657]: IRQ 59 affinity is now unmanaged Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1398]: Executing: /usr/bin/dracut --list-modules Jan 11 11:24:58 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1469]: dracut-103-1.el10 Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Executing: /usr/bin/dracut --add kdumpbase --quiet --hostonly --hostonly-cmdline --hostonly-i18n --hostonly-mode strict --hostonly-nics --aggressive-strip --omit "rdma plymouth resume ifcfg earlykdump" --mount "/dev/disk/by-uuid/5421f911-fafd-4f0d-bf2e-2916252992eb /sysroot xfs rw,relatime,seclabel,attr2,inode64,logbufs=8,logbsize=32k,noquota" --add squash-squashfs --squash-compressor zstd --no-hostonly-default-device -f /boot/initramfs-6.12.0-38.el10.x86_64kdump.img 6.12.0-38.el10.x86_64 Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 
'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-networkd' will not be installed, because command '/usr/lib/systemd/systemd-networkd-wait-online' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'busybox' will not be installed, because command 'busybox' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'connman' will not be installed, because command 'connmand' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'ifcfg' will not be installed, because it's in the list to be omitted! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'plymouth' will not be installed, because it's in the list to be omitted! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! 
Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'multipath' will not be installed, because command 'multipath' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'crypt-gpg' will not be installed, because command 'gpg' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'pcsc' will not be installed, because command 'pcscd' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'resume' will not be installed, because it's in the list to be omitted! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'biosdevname' will not be installed, because command 'biosdevname' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'earlykdump' will not be installed, because it's in the list to be omitted! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-bsod' will not be installed, because command '/usr/lib/systemd/systemd-bsod' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-pcrphase' will not be installed, because command '/usr/lib/systemd/systemd-pcrphase' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-portabled' will not be installed, because command 'portablectl' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-portabled' will not be installed, because command '/usr/lib/systemd/systemd-portabled' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-resolved' will not be installed, because command '/usr/lib/systemd/systemd-resolved' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-timesyncd' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'systemd-timesyncd' will not be installed, because command '/usr/lib/systemd/systemd-time-wait-sync' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'busybox' will not be installed, because command 'busybox' could not be found! 
Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'dbus-daemon' will not be installed, because command 'dbus-daemon' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'connman' will not be installed, because command 'connmand' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'connman' will not be installed, because command 'connmanctl' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'connman' will not be installed, because command 'connmand-wait-online' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: 62bluetooth: Could not find any command of '/usr/lib/bluetooth/bluetoothd /usr/libexec/bluetooth/bluetoothd'! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'btrfs' will not be installed, because command 'btrfs' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'dmraid' will not be installed, because command 'dmraid' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'mdraid' will not be installed, because command 'mdadm' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'multipath' will not be installed, because command 'multipath' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'crypt-gpg' will not be installed, because command 'gpg' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'pcsc' will not be installed, because command 'pcscd' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'cifs' will not be installed, because command 'mount.cifs' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'iscsi' will not be installed, because command 'iscsi-iname' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'iscsi' will not be installed, because command 'iscsiadm' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'iscsi' will not be installed, because command 'iscsid' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'nvmf' will not be installed, because command 'nvme' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'squash-erofs' will not be installed, because command 'mkfs.erofs' could not be found! Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Module 'squash-erofs' will not be installed, because command 'fsck.erofs' could not be found! 
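Editor's note: the dracut lines above show the two ways a module gets skipped: its helper binary is missing ("command ... could not be found") or it is "in the list to be omitted". The omit list for this host includes ifcfg, plymouth, resume and earlykdump, as logged. A minimal sketch of how such an omit list could be declared, written as an Ansible task for consistency with the rest of this run; the drop-in path and the decision to manage it this way are illustrative, not something the recorded run does.

- name: Example only - declare dracut modules to omit (drop-in path is illustrative)
  ansible.builtin.copy:
    dest: /etc/dracut.conf.d/99-omit-example.conf
    mode: "0644"
    content: |
      # These module names match the "in the list to be omitted" entries in the journal above
      omit_dracutmodules+=" ifcfg plymouth resume earlykdump "

After changing a drop-in like this, the initramfs would have to be regenerated (for example with dracut -f) for the new omit list to take effect; in this run the rebuild is being driven by the kdump service rather than by a manual call.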
Jan 11 11:24:59 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: fips *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com chronyd[674]: Selected source 10.2.32.38 Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: fips-crypto-policies *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd-ask-password *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd-initrd *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd-journald *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd-modules-load *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd-sysctl *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd-sysusers *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd-tmpfiles *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: systemd-udevd *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: rngd *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: i18n *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: drm *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: prefixdevname *** Jan 11 11:25:00 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: kernel-modules *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: kernel-modules-extra *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: kernel-modules-extra: configuration source "/run/depmod.d" does not exist Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: kernel-modules-extra: configuration source "/lib/depmod.d" does not exist Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: kernel-modules-extra: parsing configuration file "/etc/depmod.d/dist.conf" Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: kernel-modules-extra: /etc/depmod.d/dist.conf: added "updates extra built-in weak-updates" to the list of search directories Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: pcmcia *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Skipping udev rule: 60-pcmcia.rules Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: fstab-sys *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: hwdb *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: rootfs-block *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: squash-squashfs *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: terminfo *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: udev-rules *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** 
Including module: dracut-systemd *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: usrmount *** Jan 11 11:25:01 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: base *** Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: fs-lib *** Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: kdumpbase *** Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: memstrack *** Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: microcode_ctl-fw_dir_override *** Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: microcode_ctl module: mangling fw_dir Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: microcode_ctl: reset fw_dir to "/lib/firmware/updates /lib/firmware" Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel"... Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: microcode_ctl: intel: caveats check for kernel version "6.12.0-38.el10.x86_64" passed, adding "/usr/share/microcode_ctl/ucode_with_caveats/intel" to fw_dir variable Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: microcode_ctl: processing data directory "/usr/share/microcode_ctl/ucode_with_caveats/intel-06-4f-01"... Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: microcode_ctl: configuration "intel-06-4f-01" is ignored Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: microcode_ctl: final fw_dir: "/usr/share/microcode_ctl/ucode_with_caveats/intel /lib/firmware/updates /lib/firmware" Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: shutdown *** Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including module: squash-lib *** Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Including modules done *** Jan 11 11:25:02 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Installing kernel module dependencies *** Jan 11 11:25:03 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Installing kernel module dependencies done *** Jan 11 11:25:03 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Resolving executable dependencies *** Jan 11 11:25:03 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
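Editor's note: this dracut pass (PID 1472) is assembling the crash-dump initramfs that shows up a little later in the journal as /boot/initramfs-6.12.0-38.el10.x86_64kdump.img, and it is driven by the kdump service. A minimal sketch of keeping that service armed from a play; this task is illustrative and is not part of the recorded test run.

- name: Example only - ensure the crash-recovery kernel service is armed
  ansible.builtin.systemd:
    name: kdump
    state: started
    enabled: true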
Jan 11 11:25:03 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Resolving executable dependencies done *** Jan 11 11:25:03 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Hardlinking files *** Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Mode: real Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Method: sha256 Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Files: 548 Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Linked: 23 files Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Compared: 0 xattrs Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Compared: 51 files Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Saved: 13.58 MiB Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Duration: 0.193814 seconds Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Hardlinking files done *** Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Generating early-microcode cpio image *** Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Constructing GenuineIntel.bin *** Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Constructing GenuineIntel.bin *** Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Store current command line parameters *** Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: Stored kernel commandline: Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: No dracut internal kernel commandline stored in the initramfs Jan 11 11:25:04 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Squashing the files inside the initramfs *** Jan 11 11:25:12 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Squashing the files inside the initramfs done *** Jan 11 11:25:12 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Creating image file '/boot/initramfs-6.12.0-38.el10.x86_64kdump.img' *** Jan 11 11:25:12 ip-10-31-42-117.us-east-1.aws.redhat.com dracut[1472]: *** Creating initramfs image file '/boot/initramfs-6.12.0-38.el10.x86_64kdump.img' done *** Jan 11 11:25:12 ip-10-31-42-117.us-east-1.aws.redhat.com kernel: PKCS7: Message signed outside of X.509 validity window Jan 11 11:25:13 ip-10-31-42-117.us-east-1.aws.redhat.com kdumpctl[883]: kdump: kexec: loaded kdump kernel Jan 11 11:25:13 ip-10-31-42-117.us-east-1.aws.redhat.com kdumpctl[883]: kdump: Starting kdump: [OK] Jan 11 11:25:13 ip-10-31-42-117.us-east-1.aws.redhat.com kdumpctl[883]: kdump: Notice: No vmcore creation test performed! Jan 11 11:25:13 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Finished kdump.service - Crash recovery kernel arming. ░░ Subject: A start job for unit kdump.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit kdump.service has finished successfully. ░░ ░░ The job identifier is 260. Jan 11 11:25:13 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Startup finished in 986ms (kernel) + 4.099s (initrd) + 29.406s (userspace) = 34.492s. ░░ Subject: System start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ All system services necessary queued for starting at boot have been ░░ started. Note that this does not mean that the machine is now idle as services ░░ might still be busy with completing start-up. 
░░ ░░ Kernel start-up required 986465 microseconds. ░░ ░░ Initrd start-up required 4099369 microseconds. ░░ ░░ Userspace start-up required 29406330 microseconds. Jan 11 11:25:23 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4334]: Accepted publickey for root from 10.30.34.106 port 36526 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd-logind[659]: New session 1 of user root. ░░ Subject: A new session 1 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 1 has been created for the user root. ░░ ░░ The leading process of the session is 4334. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Created slice user-0.slice - User Slice of UID 0. ░░ Subject: A start job for unit user-0.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-0.slice has finished successfully. ░░ ░░ The job identifier is 650. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Starting user-runtime-dir@0.service - User Runtime Directory /run/user/0... ░░ Subject: A start job for unit user-runtime-dir@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has begun execution. ░░ ░░ The job identifier is 572. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Finished user-runtime-dir@0.service - User Runtime Directory /run/user/0. ░░ Subject: A start job for unit user-runtime-dir@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@0.service has finished successfully. ░░ ░░ The job identifier is 572. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Starting user@0.service - User Manager for UID 0... ░░ Subject: A start job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 652. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd-logind[659]: New session 2 of user root. ░░ Subject: A new session 2 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 2 has been created for the user root. ░░ ░░ The leading process of the session is 4339. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com (systemd)[4339]: pam_unix(systemd-user:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Queued start job for default target default.target. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. 
░░ ░░ The job identifier is 4. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes was skipped because of an unmet condition check (ConditionUser=!@system). ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 7. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 12. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 3. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Listening on dbus.socket - D-Bus User Message Bus Socket. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Reached target basic.target - Basic System. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[4339]: Startup finished in 113ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 0 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 113388 microseconds. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Started user@0.service - User Manager for UID 0. ░░ Subject: A start job for unit user@0.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@0.service has finished successfully. ░░ ░░ The job identifier is 652. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Started session-1.scope - Session 1 of User root. ░░ Subject: A start job for unit session-1.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-1.scope has finished successfully. ░░ ░░ The job identifier is 733. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4334]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4350]: Received disconnect from 10.30.34.106 port 36526:11: disconnected by user Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4350]: Disconnected from user root 10.30.34.106 port 36526 Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4334]: pam_unix(sshd:session): session closed for user root Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: session-1.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-1.scope has successfully entered the 'dead' state. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd-logind[659]: Session 1 logged out. Waiting for processes to exit. Jan 11 11:27:39 ip-10-31-42-117.us-east-1.aws.redhat.com systemd-logind[659]: Removed session 1. ░░ Subject: Session 1 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 1 has been terminated. Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4386]: Accepted publickey for root from 10.31.9.220 port 38948 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4387]: Accepted publickey for root from 10.31.9.220 port 38960 ssh2: RSA SHA256:W3cSdmPJK+d9RwU97ardijPXIZnxHswrpTHWW9oYtEU Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com systemd-logind[659]: New session 3 of user root. 
░░ Subject: A new session 3 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 3 has been created for the user root. ░░ ░░ The leading process of the session is 4386. Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Started session-3.scope - Session 3 of User root. ░░ Subject: A start job for unit session-3.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-3.scope has finished successfully. ░░ ░░ The job identifier is 815. Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com systemd-logind[659]: New session 4 of user root. ░░ Subject: A new session 4 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 4 has been created for the user root. ░░ ░░ The leading process of the session is 4387. Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Started session-4.scope - Session 4 of User root. ░░ Subject: A start job for unit session-4.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-4.scope has finished successfully. ░░ ░░ The job identifier is 897. Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4386]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4387]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4393]: Received disconnect from 10.31.9.220 port 38960:11: disconnected by user Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4393]: Disconnected from user root 10.31.9.220 port 38960 Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com sshd-session[4387]: pam_unix(sshd:session): session closed for user root Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: session-4.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit session-4.scope has successfully entered the 'dead' state. Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com systemd-logind[659]: Session 4 logged out. Waiting for processes to exit. Jan 11 11:27:46 ip-10-31-42-117.us-east-1.aws.redhat.com systemd-logind[659]: Removed session 4. ░░ Subject: Session 4 has been terminated ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A session with the ID 4 has been terminated. Jan 11 11:28:02 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Starting systemd-hostnamed.service - Hostname Service... ░░ Subject: A start job for unit systemd-hostnamed.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has begun execution. ░░ ░░ The job identifier is 979. Jan 11 11:28:02 ip-10-31-42-117.us-east-1.aws.redhat.com systemd[1]: Started systemd-hostnamed.service - Hostname Service. ░░ Subject: A start job for unit systemd-hostnamed.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit systemd-hostnamed.service has finished successfully. 
░░ ░░ The job identifier is 979. Jan 11 11:28:02 managed-node2 systemd-hostnamed[5841]: Hostname set to <managed-node2> (static) Jan 11 11:28:02 managed-node2 NetworkManager[708]: [1736612882.1934] hostname: static hostname changed from "ip-10-31-42-117.us-east-1.aws.redhat.com" to "managed-node2" Jan 11 11:28:02 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 1057. Jan 11 11:28:02 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. ░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 1057. Jan 11 11:28:12 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 11 11:28:32 managed-node2 systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jan 11 11:28:35 managed-node2 sshd-session[6514]: Accepted publickey for root from 10.31.14.128 port 52846 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Jan 11 11:28:35 managed-node2 systemd-logind[659]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 6514. Jan 11 11:28:35 managed-node2 systemd[1]: Started session-5.scope - Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 1136.
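Editor's note: the entries above record systemd-hostnamed setting the static hostname to managed-node2, which is why the remaining journal lines carry that name instead of the EC2-style one. A task of the following shape would produce the same journal entries; it is shown only as a sketch and is not part of the recorded run.

- name: Example only - set the static hostname seen in the journal
  ansible.builtin.hostname:
    name: managed-node2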
Jan 11 11:28:35 managed-node2 sshd-session[6514]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:28:36 managed-node2 python3.12[6663]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 11 11:28:37 managed-node2 python3.12[6817]: ansible-tempfile Invoked with state=directory prefix=lsr_ suffix=_podman path=None Jan 11 11:28:38 managed-node2 python3.12[6942]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False state=None _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:39 managed-node2 python3.12[7067]: ansible-user Invoked with name=podman_basic_user uid=3001 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 11 11:28:39 managed-node2 useradd[7069]: new group: name=podman_basic_user, GID=3001 Jan 11 11:28:39 managed-node2 useradd[7069]: new user: name=podman_basic_user, UID=3001, GID=3001, home=/home/podman_basic_user, shell=/bin/bash, from=/dev/pts/0 Jan 11 11:28:40 managed-node2 python3.12[7194]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd1 state=directory mode=0755 owner=podman_basic_user recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:40 managed-node2 python3.12[7319]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd2 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:41 managed-node2 python3.12[7444]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd3 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:41 managed-node2 python3.12[7569]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd1/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:28:41 managed-node2 python3.12[7669]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_adhv0_3l_podman/httpd1/index.txt 
mode=0644 owner=podman_basic_user src=/root/.ansible/tmp/ansible-tmp-1736612921.1510544-7094-193496919768865/.source.txt _original_basename=.060zt5t3 follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:42 managed-node2 python3.12[7794]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd2/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:28:42 managed-node2 python3.12[7894]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_adhv0_3l_podman/httpd2/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1736612921.821283-7094-184250787704089/.source.txt _original_basename=.wa9mfvw9 follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:42 managed-node2 python3.12[8019]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd3/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:28:43 managed-node2 python3.12[8119]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_adhv0_3l_podman/httpd3/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1736612922.4798434-7094-30966835373969/.source.txt _original_basename=.8cy2dulw follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:43 managed-node2 python3.12[8244]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:28:43 managed-node2 python3.12[8369]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:28:45 managed-node2 sudo[8619]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-anvwlwokizgjnjnjtuosodawsgjcavfv ; /usr/bin/python3.12 /root/.ansible/tmp/ansible-tmp-1736612925.1132412-7210-124838245355389/AnsiballZ_dnf.py' Jan 11 11:28:45 managed-node2 sudo[8619]: pam_unix(sudo:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:28:45 managed-node2 python3.12[8622]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:28:54 managed-node2 kernel: SELinux: Converting 385 SID table entries... 
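Editor's note: the ansible-* invocations above are the test's setup phase: a temporary work directory, a rootless user (podman_basic_user, UID 3001), three web roots (httpd1-3) with an index.txt each, and a dnf install of iptables-nft, podman and shadow-utils-subid. A condensed sketch of equivalent tasks; the user, directory names, modes and package list come straight from the log, while the play wrapper, the fixed /tmp path (which was generated by tempfile in the real run) and the placeholder file content are illustrative.

- name: Example only - condensed equivalent of the setup recorded above
  hosts: managed-node2
  become: true
  tasks:
    - name: Create the rootless test user seen in the log
      ansible.builtin.user:
        name: podman_basic_user
        uid: 3001
        state: present

    - name: Create the per-container web roots
      ansible.builtin.file:
        path: "/tmp/lsr_adhv0_3l_podman/{{ item.dir }}"
        state: directory
        mode: "0755"
        owner: "{{ item.owner }}"
      loop:
        - { dir: httpd1, owner: podman_basic_user }
        - { dir: httpd2, owner: root }
        - { dir: httpd3, owner: root }

    - name: Drop an index.txt into each web root (real payload is not shown in the journal)
      ansible.builtin.copy:
        dest: "/tmp/lsr_adhv0_3l_podman/{{ item }}/index.txt"
        content: "placeholder\n"
        mode: "0644"
      loop: [httpd1, httpd2, httpd3]

    - name: Install the packages the podman role needs
      ansible.builtin.dnf:
        name:
          - iptables-nft
          - podman
          - shadow-utils-subid
        state: present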
Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 11 11:28:57 managed-node2 kernel: SELinux: Converting 386 SID table entries... Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 11 11:28:57 managed-node2 setsebool[8707]: The virt_use_nfs policy boolean was changed to 1 by root Jan 11 11:28:57 managed-node2 setsebool[8707]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root Jan 11 11:29:00 managed-node2 kernel: SELinux: Converting 393 SID table entries... Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 11 11:29:01 managed-node2 groupadd[8727]: group added to /etc/group: name=polkitd, GID=114 Jan 11 11:29:01 managed-node2 groupadd[8727]: group added to /etc/gshadow: name=polkitd Jan 11 11:29:01 managed-node2 groupadd[8727]: new group: name=polkitd, GID=114 Jan 11 11:29:01 managed-node2 useradd[8730]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none Jan 11 11:29:01 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:29:01 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:29:01 managed-node2 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1220. Jan 11 11:29:02 managed-node2 systemd[1]: Started run-p9129-i9429.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p9129-i9429.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p9129-i9429.service has finished successfully. ░░ ░░ The job identifier is 1298. Jan 11 11:29:02 managed-node2 systemctl[9130]: Warning: The unit file, source configuration file or drop-ins of man-db-cache-update.service changed on disk. Run 'systemctl daemon-reload' to reload units. Jan 11 11:29:02 managed-node2 systemd[1]: Reload requested from client PID 9133 ('systemctl') (unit session-5.scope)... Jan 11 11:29:02 managed-node2 systemd[1]: Reloading... Jan 11 11:29:02 managed-node2 systemd-ssh-generator[9182]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:29:02 managed-node2 systemd-rc-local-generator[9179]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:29:02 managed-node2 (sd-exec-[9152]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:29:02 managed-node2 systemd[1]: Reloading finished in 195 ms. Jan 11 11:29:02 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1376. Jan 11 11:29:02 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 11 11:29:02 managed-node2 systemd[4339]: Received SIGRTMIN+25 from PID 1 (systemd). 
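Editor's note: a few entries back, setsebool records virt_use_nfs and virt_sandbox_use_all_caps being switched on while the podman packages are installed. A minimal sketch of setting those booleans explicitly with ansible.posix.seboolean; in the recorded run they are flipped as a side effect of the package/role installation, not by a task like this one.

- name: Example only - ensure the SELinux booleans noted in the journal are enabled
  ansible.posix.seboolean:
    name: "{{ item }}"
    state: true
    persistent: true
  loop:
    - virt_use_nfs
    - virt_sandbox_use_all_caps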
Jan 11 11:29:02 managed-node2 systemd[1]: Reloading user@0.service - User Manager for UID 0... ░░ Subject: A reload job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1454. Jan 11 11:29:02 managed-node2 systemd[4339]: Reexecuting. Jan 11 11:29:02 managed-node2 systemd[1]: Reloaded user@0.service - User Manager for UID 0. ░░ Subject: A reload job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has finished. ░░ ░░ The job identifier is 1454 and the job result is done. Jan 11 11:29:03 managed-node2 sudo[8619]: pam_unix(sudo:session): session closed for user root Jan 11 11:29:04 managed-node2 python3.12[9871]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:29:05 managed-node2 python3.12[10008]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 11 11:29:05 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 11 11:29:05 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1376. Jan 11 11:29:05 managed-node2 systemd[1]: run-p9129-i9429.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p9129-i9429.service has successfully entered the 'dead' state. 
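Editor's note: before configuring anything, the role probes the target: the journal shows a `podman --version` command and a getent lookup of the root account. A sketch of how such probes are commonly expressed; the task names and registered variable are illustrative.

- name: Example only - probe podman version and the root account
  block:
    - name: Get the podman version string
      ansible.builtin.command: podman --version
      register: podman_version_out
      changed_when: false

    - name: Look up the root user entry
      ansible.builtin.getent:
        database: passwd
        key: root

    - name: Show the detected version
      ansible.builtin.debug:
        msg: "{{ podman_version_out.stdout }}"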
Jan 11 11:29:05 managed-node2 python3.12[10144]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:06 managed-node2 python3.12[10277]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:07 managed-node2 python3.12[10408]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:07 managed-node2 python3.12[10539]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:09 managed-node2 python3.12[10671]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:29:09 managed-node2 python3.12[10804]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:29:09 managed-node2 systemd[1]: Reload requested from client PID 10807 ('systemctl') (unit session-5.scope)... Jan 11 11:29:09 managed-node2 systemd[1]: Reloading... Jan 11 11:29:10 managed-node2 systemd-rc-local-generator[10845]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:29:10 managed-node2 systemd-ssh-generator[10851]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:29:10 managed-node2 (sd-exec-[10825]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:29:10 managed-node2 systemd[1]: Reloading finished in 188 ms. Jan 11 11:29:10 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1455. Jan 11 11:29:10 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1455. Jan 11 11:29:10 managed-node2 kernel: Warning: Unmaintained driver is detected: ip_set Jan 11 11:29:10 managed-node2 systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1538. Jan 11 11:29:10 managed-node2 polkitd[10993]: Started polkitd version 125 Jan 11 11:29:10 managed-node2 systemd[1]: Started polkit.service - Authorization Manager. 
░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 1538. Jan 11 11:29:11 managed-node2 python3.12[11045]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:29:12 managed-node2 python3.12[11177]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:13 managed-node2 python3.12[11308]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:13 managed-node2 python3.12[11439]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:14 managed-node2 python3.12[11571]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:15 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:29:15 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:29:15 managed-node2 systemd[1]: Started run-p11577-i11877.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p11577-i11877.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p11577-i11877.service has finished successfully. ░░ ░░ The job identifier is 1619. Jan 11 11:29:15 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1697. Jan 11 11:29:15 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 11 11:29:15 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1697. Jan 11 11:29:15 managed-node2 systemd[1]: run-p11577-i11877.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p11577-i11877.service has successfully entered the 'dead' state. Jan 11 11:29:15 managed-node2 python3.12[11711]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 11 11:29:17 managed-node2 python3.12[11875]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:29:18 managed-node2 kernel: SELinux: Converting 423 SID table entries... 
Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 11 11:29:18 managed-node2 python3.12[12010]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:29:23 managed-node2 python3.12[12141]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:24 managed-node2 python3.12[12274]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:25 managed-node2 python3.12[12405]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:29:25 managed-node2 python3.12[12536]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:29:25 managed-node2 python3.12[12641]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736612965.2957008-8350-171865569281770/.source.yml _original_basename=.uofn7p8f follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:29:26 managed-node2 python3.12[12772]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:29:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat2222882315-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat2222882315-merged.mount has successfully entered the 'dead' state. 
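Editor's note: the surrounding entries record the firewall role opening 15001-15003/tcp and the role's local_seport module labeling the same range as http_port_t, followed by the SELinux policy reload seen in the kernel lines above. A hedged sketch of equivalent standalone steps; the firewall variable shape follows that role's documented port/state form, and community.general.seport is used here only as a stand-in for the collection-internal local_seport module named in the log.

- name: Example only - open the test port range via the firewall role
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    firewall:
      - port: 15001-15003/tcp
        state: enabled

- name: Example only - label the same range with the httpd port type
  community.general.seport:
    ports: 15001-15003
    proto: tcp
    setype: http_port_t
    state: present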
Jan 11 11:29:26 managed-node2 rsyslogd[886]: imjournal: journal files changed, reloading... [v8.2412.0-1.el10 try https://www.rsyslog.com/e/0 ] Jan 11 11:29:26 managed-node2 kernel: evm: overlay not supported Jan 11 11:29:26 managed-node2 podman[12779]: 2025-01-11 11:29:26.527506046 -0500 EST m=+0.076751034 system refresh Jan 11 11:29:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck928390053-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck928390053-merged.mount has successfully entered the 'dead' state. Jan 11 11:29:26 managed-node2 podman[12779]: 2025-01-11 11:29:26.802195054 -0500 EST m=+0.351440103 image build f41b26d639b72cab2f92dd19cf0a7b368c0addd834cde4dfbe6ba9cc0b5e9755 Jan 11 11:29:26 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 1776. Jan 11 11:29:26 managed-node2 systemd[1]: Created slice machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice - cgroup machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice. ░░ Subject: A start job for unit machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice has finished successfully. ░░ ░░ The job identifier is 1775. Jan 11 11:29:26 managed-node2 podman[12779]: 2025-01-11 11:29:26.84857707 -0500 EST m=+0.397821985 container create 8b291853c44d64b81d1ceed7ffd8c7c0a03d32d84fe9fcd377032ab3ba163ed6 (image=localhost/podman-pause:5.3.1-1733097600, name=8f21b98d6977-infra, pod_id=8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844, io.buildah.version=1.38.0) Jan 11 11:29:26 managed-node2 podman[12779]: 2025-01-11 11:29:26.854692046 -0500 EST m=+0.403936935 pod create 8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844 (image=, name=nopull) Jan 11 11:29:28 managed-node2 podman[12779]: 2025-01-11 11:29:28.907614035 -0500 EST m=+2.456859238 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 11 11:29:28 managed-node2 podman[12779]: 2025-01-11 11:29:28.930557469 -0500 EST m=+2.479802384 container create 010c205b58090946e480f7d230e44e257dd9db66fbcfe0636fb2c68d69f1bba4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z) Jan 11 11:29:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
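The ansible-containers.podman.podman_play invocation recorded above (state=created against /etc/containers/ansible-kubernetes.d/nopull.yml) corresponds roughly to a task like the following. This is a minimal sketch using only the parameters visible in the log; collection defaults are assumed for everything else.

- name: Create (but do not start) the pod described by the kube file
  containers.podman.podman_play:
    kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
    state: created

With state created, podman builds the pause image, pulls the referenced test image, and creates the pod and its containers, which matches the pod create and container create events journalled above with no corresponding start.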
Jan 11 11:29:31 managed-node2 python3.12[13112]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:29:32 managed-node2 python3.12[13249]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:35 managed-node2 python3.12[13382]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:36 managed-node2 python3.12[13514]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:29:36 managed-node2 python3.12[13647]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:29:37 managed-node2 python3.12[13780]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:29:39 managed-node2 python3.12[13911]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:40 managed-node2 python3.12[14043]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:41 managed-node2 python3.12[14175]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 11 11:29:42 managed-node2 python3.12[14335]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:29:43 managed-node2 python3.12[14466]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:29:47 managed-node2 python3.12[14597]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:29:49 managed-node2 podman[14738]: 2025-01-11 11:29:49.434170482 -0500 EST m=+0.252936003 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 11 11:29:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:29:49 managed-node2 python3.12[14876]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:50 managed-node2 python3.12[15007]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:29:50 managed-node2 python3.12[15138]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:29:50 managed-node2 python3.12[15243]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736612990.3622415-9267-128608785517959/.source.yml _original_basename=.lo9fbot2 follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:29:51 managed-node2 python3.12[15374]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:29:51 managed-node2 systemd[1]: Created slice machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice - cgroup machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice. ░░ Subject: A start job for unit machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice has finished successfully. ░░ ░░ The job identifier is 1781. Jan 11 11:29:51 managed-node2 podman[15381]: 2025-01-11 11:29:51.566032778 -0500 EST m=+0.059424724 container create 5a92effc85487f4e0d67a204339e8c66d39fd2acb0e397ba7a06494670bdc5fa (image=localhost/podman-pause:5.3.1-1733097600, name=6939fbfdbe78-infra, pod_id=6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7, io.buildah.version=1.38.0) Jan 11 11:29:51 managed-node2 podman[15381]: 2025-01-11 11:29:51.57210719 -0500 EST m=+0.065499244 pod create 6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7 (image=, name=bogus) Jan 11 11:29:52 managed-node2 podman[15381]: 2025-01-11 11:29:52.780406701 -0500 EST m=+1.273798703 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 11 11:29:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
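The pull-error events above come from a kube file that deliberately references a nonexistent image (quay.io/linux-system-roles/this_is_a_bogus_image:latest). If you wanted to pre-flight such a reference outside the role, a hypothetical check could look like the sketch below; these tasks are not part of the test, and the registered variable name is made up.

- name: Try to pull the referenced image and record the outcome (hypothetical pre-flight check)
  containers.podman.podman_image:
    name: quay.io/linux-system-roles/this_is_a_bogus_image:latest
    state: present
  register: __bogus_pull
  ignore_errors: true

- name: Report whether the pull failed
  ansible.builtin.debug:
    msg: "pull failed: {{ __bogus_pull is failed }}"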
Jan 11 11:29:55 managed-node2 python3.12[15650]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:29:56 managed-node2 python3.12[15787]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:58 managed-node2 python3.12[15920]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:00 managed-node2 python3.12[16052]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:30:00 managed-node2 python3.12[16185]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:30:01 managed-node2 python3.12[16318]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:30:03 managed-node2 python3.12[16449]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:03 managed-node2 python3.12[16581]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:04 managed-node2 python3.12[16713]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 11 11:30:06 managed-node2 python3.12[16873]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:30:06 managed-node2 python3.12[17004]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:30:11 managed-node2 python3.12[17135]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:12 managed-node2 python3.12[17268]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:13 managed-node2 python3.12[17400]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 11 11:30:13 managed-node2 python3.12[17533]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:14 managed-node2 python3.12[17666]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:30:14 managed-node2 python3.12[17666]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jan 11 11:30:14 managed-node2 podman[17673]: 2025-01-11 11:30:14.120968075 -0500 EST m=+0.024014018 pod stop 8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844 (image=, name=nopull) Jan 11 11:30:14 managed-node2 systemd[1]: Removed slice machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice - cgroup machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice. ░░ Subject: A stop job for unit machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice has finished. ░░ ░░ The job identifier is 1787 and the job result is done. 
Jan 11 11:30:14 managed-node2 podman[17673]: 2025-01-11 11:30:14.1635176 -0500 EST m=+0.066563524 container remove 010c205b58090946e480f7d230e44e257dd9db66fbcfe0636fb2c68d69f1bba4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 11 11:30:14 managed-node2 podman[17673]: 2025-01-11 11:30:14.18894132 -0500 EST m=+0.091987362 container remove 8b291853c44d64b81d1ceed7ffd8c7c0a03d32d84fe9fcd377032ab3ba163ed6 (image=localhost/podman-pause:5.3.1-1733097600, name=8f21b98d6977-infra, pod_id=8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844, io.buildah.version=1.38.0) Jan 11 11:30:14 managed-node2 podman[17673]: 2025-01-11 11:30:14.198654504 -0500 EST m=+0.101700390 pod remove 8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844 (image=, name=nopull) Jan 11 11:30:14 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:30:14 managed-node2 python3.12[17813]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:15 managed-node2 python3.12[17944]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
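The teardown sequence above (systemd-escape to derive the podman-kube@ unit name, stopping that unit, podman_play state=absent, removing the kube file, then podman image prune -f) can be approximated by the tasks below. This is a sketch assembled from the logged module parameters; the role itself guards each step with existence checks that are omitted here, and the variable name is illustrative.

- name: Derive the podman-kube@ unit name for the kube file
  ansible.builtin.command:
    cmd: systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml
  register: __kube_unit
  changed_when: false

- name: Stop and disable the generated unit
  ansible.builtin.systemd:
    name: "{{ __kube_unit.stdout }}"
    state: stopped
    enabled: false

- name: Remove the pod and containers defined by the kube file
  containers.podman.podman_play:
    kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
    state: absent

- name: Remove the kube file itself
  ansible.builtin.file:
    path: /etc/containers/ansible-kubernetes.d/nopull.yml
    state: absent

- name: Prune images that are no longer referenced
  ansible.builtin.command:
    cmd: podman image prune -f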
Jan 11 11:30:18 managed-node2 python3.12[18214]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:19 managed-node2 python3.12[18351]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:22 managed-node2 python3.12[18484]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:23 managed-node2 python3.12[18616]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:30:24 managed-node2 python3.12[18749]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:30:24 managed-node2 python3.12[18882]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:30:26 managed-node2 python3.12[19013]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:27 managed-node2 python3.12[19145]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:28 managed-node2 python3.12[19277]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 11 11:30:30 managed-node2 python3.12[19437]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:30:30 managed-node2 python3.12[19568]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:30:35 managed-node2 python3.12[19699]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:36 managed-node2 python3.12[19832]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:37 managed-node2 python3.12[19964]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 11 11:30:38 managed-node2 python3.12[20097]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:38 managed-node2 python3.12[20230]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:30:38 managed-node2 python3.12[20230]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jan 11 11:30:38 managed-node2 podman[20237]: 2025-01-11 11:30:38.640117526 -0500 EST m=+0.024351398 pod stop 6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7 (image=, name=bogus) Jan 11 11:30:38 managed-node2 systemd[1]: Removed slice machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice - cgroup machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice. ░░ Subject: A stop job for unit machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice has finished. ░░ ░░ The job identifier is 1789 and the job result is done. 
Jan 11 11:30:38 managed-node2 podman[20237]: 2025-01-11 11:30:38.680455077 -0500 EST m=+0.064688863 container remove 5a92effc85487f4e0d67a204339e8c66d39fd2acb0e397ba7a06494670bdc5fa (image=localhost/podman-pause:5.3.1-1733097600, name=6939fbfdbe78-infra, pod_id=6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7, io.buildah.version=1.38.0) Jan 11 11:30:38 managed-node2 podman[20237]: 2025-01-11 11:30:38.690033298 -0500 EST m=+0.074267051 pod remove 6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7 (image=, name=bogus) Jan 11 11:30:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:30:39 managed-node2 python3.12[20377]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:39 managed-node2 python3.12[20508]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
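Alongside each deploy and cleanup cycle, the log repeatedly shows the firewall role opening 15001-15003/tcp (the ansible-fedora.linux_system_roles.firewall_lib entries). A minimal sketch of that role input is shown below, assuming the firewall role's documented firewall variable; it is illustrative rather than copied from the test.

- name: Open the test port range in firewalld (illustrative sketch)
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    # Mirrors the logged firewall_lib call: 15001-15003/tcp, permanent and runtime
    firewall:
      - port: 15001-15003/tcp
        state: enabled
        permanent: true
        runtime: true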
Jan 11 11:30:42 managed-node2 python3.12[20778]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:43 managed-node2 python3.12[20916]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:47 managed-node2 python3.12[21049]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:48 managed-node2 python3.12[21181]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:30:48 managed-node2 python3.12[21314]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:30:49 managed-node2 python3.12[21447]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:30:51 managed-node2 python3.12[21578]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:52 managed-node2 python3.12[21710]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:53 managed-node2 python3.12[21842]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 11 11:30:54 managed-node2 python3.12[22002]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:30:55 managed-node2 python3.12[22133]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:30:59 managed-node2 python3.12[22264]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jan 11 11:31:00 managed-node2 python3.12[22396]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:00 managed-node2 python3.12[22529]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:01 managed-node2 python3.12[22661]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:02 managed-node2 python3.12[22793]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:03 managed-node2 python3.12[22925]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:03 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 1869. Jan 11 11:31:03 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 1791. Jan 11 11:31:03 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 1791. Jan 11 11:31:03 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 1871. 
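The entries above show the preparation for rootless podman under podman_basic_user: checking the subuid/subgid ranges with getsubids and enabling session lingering so user services can run without a login, which is what triggers the user-3001.slice and user@3001.service start-up that follows. A rough equivalent of those preparation steps is sketched below, with the command arguments taken directly from the logged invocations.

- name: Verify a subuid range exists for the rootless user
  ansible.builtin.command:
    cmd: getsubids podman_basic_user
  changed_when: false

- name: Verify a subgid range exists for the rootless user
  ansible.builtin.command:
    cmd: getsubids -g podman_basic_user
  changed_when: false

- name: Enable lingering so the user's podman units can run without an active session
  ansible.builtin.command:
    cmd: loginctl enable-linger podman_basic_user
    creates: /var/lib/systemd/linger/podman_basic_user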
Jan 11 11:31:03 managed-node2 systemd-logind[659]: New session 6 of user podman_basic_user. ░░ Subject: A new session 6 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 22929. Jan 11 11:31:03 managed-node2 (systemd)[22929]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Jan 11 11:31:03 managed-node2 systemd[22929]: Queued start job for default target default.target. Jan 11 11:31:03 managed-node2 systemd[22929]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 11 11:31:03 managed-node2 systemd[22929]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 11 11:31:03 managed-node2 systemd[22929]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 11 11:31:03 managed-node2 systemd[22929]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jan 11 11:31:03 managed-node2 systemd[22929]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Jan 11 11:31:03 managed-node2 systemd[22929]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 11 11:31:03 managed-node2 systemd[22929]: Listening on dbus.socket - D-Bus User Message Bus Socket. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 11 11:31:03 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 1871. Jan 11 11:31:03 managed-node2 systemd[22929]: Startup finished in 69ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 69160 microseconds. Jan 11 11:31:04 managed-node2 python3.12[23075]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:04 managed-node2 python3.12[23206]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:05 managed-node2 sudo[23379]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ebjivediwrynnjycvbbcvkpnrvrgvyty ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613064.7957578-12329-92755468246573/AnsiballZ_podman_image.py' Jan 11 11:31:05 managed-node2 sudo[23379]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 11 11:31:05 managed-node2 systemd[22929]: Created slice session.slice - User Core Session Slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 11 11:31:05 managed-node2 systemd[22929]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 11 11:31:05 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 11 11:31:05 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 11 11:31:05 managed-node2 systemd[22929]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 11 11:31:05 managed-node2 dbus-broker-launch[23403]: Ready Jan 11 11:31:05 managed-node2 systemd[22929]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 11 11:31:05 managed-node2 systemd[22929]: Started podman-23389.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 11 11:31:05 managed-node2 systemd[22929]: Started podman-pause-de278718.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 11 11:31:05 managed-node2 systemd[22929]: Started podman-23405.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 11 11:31:06 managed-node2 systemd[22929]: Started podman-23430.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. 
Jan 11 11:31:06 managed-node2 sudo[23379]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 11 11:31:07 managed-node2 python3.12[23567]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:07 managed-node2 python3.12[23698]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:08 managed-node2 python3.12[23829]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:08 managed-node2 python3.12[23934]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736613067.8332987-12432-130581053166951/.source.yml _original_basename=.vntwb0uw follow=False checksum=bd406dc7744755fdf41f83e27a5ef8497bec46ba backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:08 managed-node2 sudo[24107]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hdtiduuiveohnpfnsmzuxmnranaqsupd ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613068.54518-12462-228328251565918/AnsiballZ_podman_play.py' Jan 11 11:31:08 managed-node2 sudo[24107]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 11 11:31:08 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:31:08 managed-node2 systemd[22929]: Started podman-24117.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 11 11:31:09 managed-node2 systemd[22929]: Created slice user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice - cgroup user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. 
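The sudo/become block above runs podman_play as podman_basic_user with XDG_RUNTIME_DIR pointed at the user's runtime directory, deploying ~/.config/containers/ansible-kubernetes.d/httpd1.yml with state=started and debug logging. A reduced sketch of that task is below; only parameters visible in the log are used.

- name: Deploy httpd1.yml as the rootless user
  containers.podman.podman_play:
    kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
    state: started
    debug: true
    log_level: debug
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001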
Jan 11 11:31:09 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 11 11:31:09 managed-node2 systemd[22929]: Started rootless-netns-832a999c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 11 11:31:09 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:09 managed-node2 kernel: veth0: entered allmulticast mode Jan 11 11:31:09 managed-node2 kernel: veth0: entered promiscuous mode Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 11 11:31:09 managed-node2 systemd[22929]: Started run-p24198-i24498.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Jan 11 11:31:09 managed-node2 aardvark-dns[24198]: starting aardvark on a child with pid 24199 Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Successfully parsed config Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Listen v6 ip {} Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Jan 11 11:31:09 managed-node2 conmon[24214]: conmon 6494e2ff6f9e6c9552ee : failed to write to /proc/self/oom_score_adj: Permission denied Jan 11 11:31:09 managed-node2 systemd[22929]: Started libpod-conmon-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : terminal_ctrl_fd: 14 Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : winsz read side: 17, winsz write side: 18 Jan 11 11:31:09 managed-node2 systemd[22929]: Started libpod-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. 
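At this point podman has created the rootless network plumbing for the pod: a per-user network namespace (the rootless-netns scope), the podman1 bridge with a veth pair, and an aardvark-dns instance serving podman-default-kube-network. If you need to inspect that state by hand, a diagnostic task along these lines would work; it is not part of the test.

- name: Inspect the kube default network as the rootless user (diagnostic sketch)
  ansible.builtin.command:
    cmd: podman network inspect podman-default-kube-network
  become: true
  become_user: podman_basic_user
  environment:
    XDG_RUNTIME_DIR: /run/user/3001
  changed_when: false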
Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : container PID: 24217 Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : container 24217 exited with status 127 Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e)" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="Using sqlite as database backend" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="systemd-logind: Unknown object '/'." Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using graph driver overlay" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using transient store: false" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 11 11:31:09 managed-node2 
/usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Initializing event backend file" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="Setting parallel job count to 7" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cleaning up container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Tearing down network namespace at /run/user/3001/netns/netns-1ba1aabb-9ce7-ed09-ced0-c13ed359f037 for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network fb42127ceae097e95afb4c22c535aaa88e07cb9147b8dd0d6c01dc2c233a4302 bridge podman1 2025-01-11 11:31:08.992158363 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully loaded 2 networks" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="The path of /etc/resolv.conf in 
the mount ns is \"/etc/resolv.conf\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Tearing down..\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [INFO netavark::firewall] Using nftables firewall driver\n" Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Received SIGHUP Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Successfully parsed config Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Listen v4 ip {} Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Listen v6 ip {} Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: No configuration found stopping the sever Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:09 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:31:09 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [INFO netavark::network::bridge] removing bridge podman1\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"INPUT\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\"tcp\")), Element(String(\"udp\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"th\", field: \"dport\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"daddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \"state\", family: None, dir: None })), right: List([String(\"established\"), String(\"related\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"POSTROUTING\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \"nv_fb42127c_10_89_0_0_nm24\" })], handle: Some(26), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 4 rules\n[DEBUG netavark::firewall::nft] Found chain nv_fb42127c_10_89_0_0_nm24\n" Jan 11 11:31:09 managed-node2 
/usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"NETAVARK-ISOLATION-3\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\"podman1\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain nv_fb42127c_10_89_0_0_nm24_dnat\n[DEBUG netavark::firewall::nft] Found chain nv_fb42127c_10_89_0_0_nm24_dnat\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Teardown complete\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cleaning up rootless network namespace" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully cleaned up container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Unmounted container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e)" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Shutting down engines" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24219 Jan 11 11:31:09 managed-node2 systemd[22929]: Stopping libpod-conmon-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 62. Jan 11 11:31:09 managed-node2 systemd[22929]: Stopped libpod-conmon-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 62 and the job result is done. Jan 11 11:31:09 managed-node2 systemd[22929]: Removed slice user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice - cgroup user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 61 and the job result is done. 
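The podman_play record that follows captures why the kube play failed: the pod's infra container (6494e2ff…) had already stopped, so the httpd1 container could not join its namespaces ("cannot get namespace path unless container … is running: container is stopped"). For orientation only, the logged command line corresponds to a task roughly like the sketch below; the module options are reconstructed from the command shown in the record, and the become user is an assumption based on the surrounding sudo session entries, so this is not the role's actual task.

- name: Play the httpd1 kube file as the rootless user (sketch reconstructed from the logged command)
  become: true
  become_user: podman_basic_user   # assumption: matches the pam_unix sudo session recorded in the journal
  containers.podman.podman_play:
    kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
    state: started                 # corresponds to the logged "play kube --start=true"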
Jan 11 11:31:09 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jan 11 11:31:09 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: [starting container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de: cannot get namespace path unless container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e is running: container is stopped] Pod: cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da Container: 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de Jan 11 11:31:09 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-11T11:31:08-05:00" level=info msg="/bin/podman filtering at log level debug" time="2025-01-11T11:31:08-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-11T11:31:08-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-11T11:31:08-05:00" level=info msg="Using sqlite as database backend" time="2025-01-11T11:31:08-05:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-01-11T11:31:08-05:00" level=debug msg="Using graph driver overlay" time="2025-01-11T11:31:08-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-01-11T11:31:08-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-01-11T11:31:08-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-01-11T11:31:08-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-01-11T11:31:08-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-01-11T11:31:08-05:00" level=debug msg="Using transient store: false" time="2025-01-11T11:31:08-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-01-11T11:31:08-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-01-11T11:31:08-05:00" level=debug msg="Initializing event backend file" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" 
time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-11T11:31:08-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-11T11:31:08-05:00" level=debug msg="Successfully loaded 1 networks" time="2025-01-11T11:31:08-05:00" level=debug msg="found free device name podman1" time="2025-01-11T11:31:08-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-01-11T11:31:08-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-11T11:31:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:08-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-11T11:31:08-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-11T11:31:08-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-11T11:31:08-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-11T11:31:08-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-11T11:31:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:08-05:00" level=debug msg="FROM \"scratch\"" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-01-11T11:31:08-05:00" level=debug msg="Check for idmapped mounts support " time="2025-01-11T11:31:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-01-11T11:31:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c681,c972\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container ID: 108a4b73ea294c92e350e6914ba70ae43bc510d0757a34527239170a2fe2fa22" time="2025-01-11T11:31:09-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-01-11T11:31:09-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2025-01-11T11:31:09-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"108a4b73ea294c92e350e6914ba70ae43bc510d0757a34527239170a2fe2fa22\"" time="2025-01-11T11:31:09-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2025-01-11T11:31:09-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-01-11T11:31:09-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"108a4b73ea294c92e350e6914ba70ae43bc510d0757a34527239170a2fe2fa22\"" time="2025-01-11T11:31:09-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-11T11:31:09-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-11T11:31:09-05:00" level=debug msg="committing image with reference 
\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2025-01-11T11:31:09-05:00" level=debug msg="layer list: [\"ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0\"]" time="2025-01-11T11:31:09-05:00" level=debug msg="using \"/var/tmp/buildah4034309036\" to hold temporary data" time="2025-01-11T11:31:09-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0/diff" time="2025-01-11T11:31:09-05:00" level=debug msg="layer \"ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2025-01-11T11:31:09-05:00" level=debug msg="OCIv1 config = {\"created\":\"2025-01-11T16:31:09.16093935Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-11T16:31:09.133319185Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-11T16:31:09.164063409Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-11T11:31:09-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\",\"size\":684},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-01-11T11:31:09-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2025-01-11T16:31:09.16093935Z\",\"container\":\"108a4b73ea294c92e350e6914ba70ae43bc510d0757a34527239170a2fe2fa22\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-11T16:31:09.133319185Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-11T16:31:09.164063409Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-11T11:31:09-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1347,\"digest\":\"sha256:3d5363d5b8025414d8363b8279ab1974a53f63e86ef685536caca8cc6f1f8c3a\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2025-01-11T11:31:09-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-01-11T11:31:09-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-01-11T11:31:09-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2025-01-11T11:31:09-05:00" level=debug msg=" Requirement 0: allowed" time="2025-01-11T11:31:09-05:00" level=debug msg="Overall: allowed" time="2025-01-11T11:31:09-05:00" level=debug msg="start reading config" time="2025-01-11T11:31:09-05:00" level=debug msg="finished reading config" time="2025-01-11T11:31:09-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-01-11T11:31:09-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2025-01-11T11:31:09-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-01-11T11:31:09-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-11T11:31:09-05:00" level=debug msg="No compression detected" time="2025-01-11T11:31:09-05:00" level=debug msg="Using original blob without modification" time="2025-01-11T11:31:09-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2025-01-11T11:31:09-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-11T11:31:09-05:00" level=debug msg="No compression detected" time="2025-01-11T11:31:09-05:00" level=debug msg="Compression change for blob sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-01-11T11:31:09-05:00" level=debug msg="Using original blob without modification" time="2025-01-11T11:31:09-05:00" level=debug msg="setting image creation date to 2025-01-11 16:31:09.16093935 +0000 UTC" time="2025-01-11T11:31:09-05:00" level=debug msg="created new image ID \"5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\" with metadata \"{}\"" time="2025-01-11T11:31:09-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-11T11:31:09-05:00" level=debug msg="printing final image id \"5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-11T11:31:09-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice for parent user.slice and name libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da" time="2025-01-11T11:31:09-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" time="2025-01-11T11:31:09-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49" time="2025-01-11T11:31:09-05:00" level=debug msg="using systemd mode: false" time="2025-01-11T11:31:09-05:00" level=debug msg="setting container name cec703044d41-infra" time="2025-01-11T11:31:09-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network fb42127ceae097e95afb4c22c535aaa88e07cb9147b8dd0d6c01dc2c233a4302 bridge podman1 2025-01-11 11:31:08.992158363 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-11T11:31:09-05:00" level=debug msg="Allocated lock 1 for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\" has run directory \"/run/user/3001/containers/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-11T11:31:09-05:00" level=debug msg="using systemd mode: false" time="2025-01-11T11:31:09-05:00" level=debug msg="adding container to pod httpd1" time="2025-01-11T11:31:09-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-01-11T11:31:09-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-11T11:31:09-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /proc" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /dev" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /sys" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-11T11:31:09-05:00" level=debug msg="Allocated lock 2 for container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de/userdata\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\" has run directory \"/run/user/3001/containers/overlay-containers/69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de/userdata\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Strongconnecting node 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="Pushed 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e onto stack" time="2025-01-11T11:31:09-05:00" level=debug msg="Finishing node 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e. Popped 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e off stack" time="2025-01-11T11:31:09-05:00" level=debug msg="Strongconnecting node 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de" time="2025-01-11T11:31:09-05:00" level=debug msg="Pushed 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de onto stack" time="2025-01-11T11:31:09-05:00" level=debug msg="Finishing node 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de. 
Popped 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de off stack" time="2025-01-11T11:31:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/VY45JJLRBH5N22CXKVWPXHFS6U,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c471,c842\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-1ba1aabb-9ce7-ed09-ced0-c13ed359f037 for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="Mounted container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/merged\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created root filesystem for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e at /home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/merged" time="2025-01-11T11:31:09-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2025-01-11T11:31:09-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2025-01-11T11:31:09-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_fb42127c_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "ba:4e:01:fd:9a:cc", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"Starting parent driver\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport1778471301/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport1778471301/.bp.sock]\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\ntime=\"2025-01-11T11:31:09-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=Ready\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport is ready" time="2025-01-11T11:31:09-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-11T11:31:09-05:00" level=debug msg="Setting Cgroups for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e to user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice:libpod:6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug 
msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-11T11:31:09-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/merged\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created OCI spec for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata/config.json" time="2025-01-11T11:31:09-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice for parent user.slice and name libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da" time="2025-01-11T11:31:09-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" time="2025-01-11T11:31:09-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" time="2025-01-11T11:31:09-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-11T11:31:09-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e -u 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata -p /run/user/3001/containers/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata/pidfile -n cec703044d41-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e]" 
time="2025-01-11T11:31:09-05:00" level=info msg="Running conmon under slice user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice and unitName libpod-conmon-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-11T11:31:09-05:00" level=debug msg="Received: 24217" time="2025-01-11T11:31:09-05:00" level=info msg="Got Conmon PID as 24215" time="2025-01-11T11:31:09-05:00" level=debug msg="Created container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e in OCI runtime" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-11T11:31:09-05:00" level=debug msg="Starting container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e with command [/catatonit -P]" time="2025-01-11T11:31:09-05:00" level=debug msg="Started container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/H6MLQJZURTWK5UKPYM3NBCTJTC,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/2fa690399eece911fb5509ec500fd7d2ca654be99b65c5d16219b201e4a935b1/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/2fa690399eece911fb5509ec500fd7d2ca654be99b65c5d16219b201e4a935b1/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c471,c842\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Mounted container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/2fa690399eece911fb5509ec500fd7d2ca654be99b65c5d16219b201e4a935b1/merged\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created root filesystem for container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de at /home/podman_basic_user/.local/share/containers/storage/overlay/2fa690399eece911fb5509ec500fd7d2ca654be99b65c5d16219b201e4a935b1/merged" time="2025-01-11T11:31:09-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-11T11:31:09-05:00" level=debug msg="Cleaning up container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de" time="2025-01-11T11:31:09-05:00" level=debug msg="Unmounted container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\"" starting container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de: cannot get namespace path unless container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e is running: container is stopped Error: failed to start 1 containers time="2025-01-11T11:31:09-05:00" level=debug msg="Shutting down engines" time="2025-01-11T11:31:09-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=24117 time="2025-01-11T11:31:09-05:00" level=debug msg="Adding parallel job to stop container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding parallel job to stop container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de" time="2025-01-11T11:31:09-05:00" level=debug msg="Stopping ctr 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de (timeout 10)" time="2025-01-11T11:31:09-05:00" level=debug msg="Stopping ctr 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e (timeout 10)" time="2025-01-11T11:31:09-05:00" level=debug msg="Removing pod cgroup user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" Jan 11 11:31:09 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jan 11 11:31:09 managed-node2 sudo[24107]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 11 11:31:10 managed-node2 python3.12[24369]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:10 managed-node2 python3.12[24501]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:11 managed-node2 python3.12[24632]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:14 managed-node2 python3.12[24806]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 11 11:31:15 managed-node2 python3.12[24966]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:15 managed-node2 python3.12[25097]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:31:17 managed-node2 python3.12[25240]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] 
enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:31:18 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 11 11:31:18 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 11 11:31:19 managed-node2 systemd[1]: Reload requested from client PID 25259 ('systemctl') (unit session-5.scope)... Jan 11 11:31:19 managed-node2 systemd[1]: Reloading... Jan 11 11:31:19 managed-node2 systemd-rc-local-generator[25308]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:19 managed-node2 systemd-ssh-generator[25310]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:19 managed-node2 (sd-exec-[25280]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:19 managed-node2 systemd[1]: Reloading finished in 208 ms. Jan 11 11:31:19 managed-node2 systemd[1]: Started run-p25319-i25619.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p25319-i25619.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p25319-i25619.service has finished successfully. ░░ ░░ The job identifier is 1956. Jan 11 11:31:19 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2034. Jan 11 11:31:19 managed-node2 systemd[1]: Reload requested from client PID 25324 ('systemctl') (unit session-5.scope)... 
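The dnf transaction and the systemd/dbus reload noise recorded above come from the certificate role installing its prerequisites (certmonger, python3-packaging). As a point of reference, a standalone equivalent of that package step, assuming only the package list taken from the dnf record above, would be roughly:

- name: Install certmonger and its helpers (sketch; package list taken from the dnf record above)
  ansible.builtin.dnf:
    name:
      - certmonger
      - python3-packaging
    state: present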
Jan 11 11:31:19 managed-node2 systemd[1]: Reloading... Jan 11 11:31:19 managed-node2 systemd-rc-local-generator[25372]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:19 managed-node2 systemd-ssh-generator[25375]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:19 managed-node2 (sd-exec-[25348]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:19 managed-node2 systemd[1]: Reloading finished in 348 ms. Jan 11 11:31:19 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 11 11:31:20 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 11 11:31:20 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2034. Jan 11 11:31:20 managed-node2 systemd[1]: run-p25319-i25619.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p25319-i25619.service has successfully entered the 'dead' state. Jan 11 11:31:20 managed-node2 python3.12[25519]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:21 managed-node2 python3.12[25650]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:21 managed-node2 python3.12[25781]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:31:21 managed-node2 systemd[1]: Reload requested from client PID 25784 ('systemctl') (unit session-5.scope)... Jan 11 11:31:21 managed-node2 systemd[1]: Reloading... Jan 11 11:31:22 managed-node2 systemd-rc-local-generator[25833]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:22 managed-node2 systemd-ssh-generator[25835]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:22 managed-node2 (sd-exec-[25805]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:22 managed-node2 systemd[1]: Reloading finished in 206 ms. Jan 11 11:31:22 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... 
░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2112. Jan 11 11:31:22 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2190. Jan 11 11:31:22 managed-node2 (rtmonger)[25843]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Jan 11 11:31:22 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2190. Jan 11 11:31:22 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Jan 11 11:31:22 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2112. Jan 11 11:31:22 managed-node2 python3.12[26004]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 
Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[26019]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
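The certificate_request call logged at 11:31:22 asked certmonger for a self-signed certificate named quadlet_demo, valid for dns=['localhost'], under /etc/pki/tls, with auto-renew enabled. A minimal sketch of playbook input that would drive an equivalent request through fedora.linux_system_roles.certificate follows; the variable layout is the role's documented interface, the values are copied from the Invoked line above, and anything else is an assumption.

    - name: Request the demo TLS certificate (sketch)
      hosts: managed-node2
      vars:
        certificate_requests:
          - name: quadlet_demo      # yields /etc/pki/tls/certs/quadlet_demo.crt and private/quadlet_demo.key
            dns:
              - localhost
            ca: self-sign           # certmonger's local self-signing CA
      roles:
        - fedora.linux_system_roles.certificate

The getcert stop-tracking call and the file removals that follow in the log are the test cleaning up this certificate after verifying it was issued.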
Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 python3.12[26150]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 11 11:31:24 managed-node2 python3.12[26281]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jan 11 11:31:24 managed-node2 python3.12[26412]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 11 11:31:25 managed-node2 python3.12[26543]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:25 managed-node2 certmonger[25843]: 2025-01-11 11:31:25 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:25 managed-node2 python3.12[26675]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:25 managed-node2 python3.12[26806]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:26 managed-node2 python3.12[26937]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:26 managed-node2 python3.12[27068]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:27 managed-node2 python3.12[27199]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:29 managed-node2 python3.12[27461]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:30 managed-node2 python3.12[27598]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 11 11:31:30 managed-node2 python3.12[27730]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:32 managed-node2 python3.12[27863]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:32 managed-node2 python3.12[27994]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:33 managed-node2 python3.12[28125]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:31:34 managed-node2 python3.12[28257]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:31:34 managed-node2 python3.12[28390]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:31:35 managed-node2 python3.12[28523]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:31:36 managed-node2 python3.12[28654]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:31:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:42 managed-node2 python3.12[29261]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:43 managed-node2 python3.12[29394]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:44 managed-node2 python3.12[29525]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:44 managed-node2 python3.12[29630]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613103.8228424-13931-69007231641131/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:45 managed-node2 python3.12[29761]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:45 managed-node2 systemd[1]: Reload requested from client PID 29762 ('systemctl') (unit session-5.scope)... Jan 11 11:31:45 managed-node2 systemd[1]: Reloading... Jan 11 11:31:45 managed-node2 systemd-rc-local-generator[29809]: /etc/rc.d/rc.local is not marked executable, skipping. 
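Stepping back to the firewall configuration recorded at 11:31:33–11:31:36: the run installed firewalld and then opened 8000/tcp and 9000/tcp (permanent and runtime) through fedora.linux_system_roles.firewall_lib. Expressed against the firewall role's documented firewall variable, an equivalent request would look roughly like the sketch below; whether this test drives the role in exactly this way is not visible in the journal.

    - name: Open the ports used by the quadlet demo (sketch; values from the firewall_lib entries)
      vars:
        firewall:
          - port: 8000/tcp
            state: enabled
            permanent: true
            runtime: true
          - port: 9000/tcp
            state: enabled
            permanent: true
            runtime: true
      ansible.builtin.include_role:
        name: fedora.linux_system_roles.firewall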
Jan 11 11:31:45 managed-node2 systemd-ssh-generator[29811]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:45 managed-node2 (sd-exec-[29784]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:45 managed-node2 systemd[1]: Reloading finished in 211 ms. Jan 11 11:31:46 managed-node2 python3.12[29949]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:46 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 2269. Jan 11 11:31:46 managed-node2 quadlet-demo-network[29953]: systemd-quadlet-demo Jan 11 11:31:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:46 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 2269. Jan 11 11:31:47 managed-node2 python3.12[30091]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:48 managed-node2 python3.12[30224]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:48 managed-node2 python3.12[30355]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:49 managed-node2 python3.12[30460]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613108.4490383-14098-270882481689807/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:49 managed-node2 python3.12[30591]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:49 managed-node2 systemd[1]: Reload requested from client PID 30592 ('systemctl') (unit session-5.scope)... Jan 11 11:31:49 managed-node2 systemd[1]: Reloading... 
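The quadlet-demo.network deployment recorded at 11:31:44–11:31:46 is the standard quadlet flow: drop the unit file into /etc/containers/systemd, run a systemd daemon-reload so the quadlet generator emits quadlet-demo-network.service, then start that service, which creates the systemd-quadlet-demo network. Reconstructed as tasks from the Invoked lines; the actual contents of quadlet-demo.network are not reproduced in the journal, so the [Network] body below is only a placeholder.

    - name: Install the quadlet network unit (placeholder content; real file not logged)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.network
        owner: root
        group: "0"
        mode: "0644"
        content: |
          [Network]
          # the test's real file sets its own options here (Subnet=, Gateway=, Label=, ...)

    - name: Reload systemd so the quadlet generator produces quadlet-demo-network.service
      ansible.builtin.systemd:
        daemon_reload: true

    - name: Start the generated unit, which creates the systemd-quadlet-demo network
      ansible.builtin.systemd:
        name: quadlet-demo-network.service
        state: started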
Jan 11 11:31:49 managed-node2 systemd-rc-local-generator[30640]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:49 managed-node2 systemd-ssh-generator[30642]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:49 managed-node2 (sd-exec-[30614]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:49 managed-node2 systemd[1]: Reloading finished in 203 ms. Jan 11 11:31:50 managed-node2 python3.12[30779]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:50 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 2353. Jan 11 11:31:50 managed-node2 podman[30783]: 2025-01-11 11:31:50.493230448 -0500 EST m=+0.036573186 volume create systemd-quadlet-demo-mysql Jan 11 11:31:50 managed-node2 quadlet-demo-mysql-volume[30783]: systemd-quadlet-demo-mysql Jan 11 11:31:50 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:50 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 2353. Jan 11 11:31:51 managed-node2 python3.12[30922]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:52 managed-node2 python3.12[31055]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:59 managed-node2 podman[31194]: 2025-01-11 11:31:59.148246293 -0500 EST m=+5.917452742 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 11 11:31:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:59 managed-node2 python3.12[31503]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:00 managed-node2 python3.12[31634]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:32:00 managed-node2 python3.12[31739]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736613119.7690783-14333-16171380749252/.source.container _original_basename=.38c34bi8 follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:00 managed-node2 python3.12[31870]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:00 managed-node2 systemd[1]: Reload requested from client PID 31871 ('systemctl') (unit session-5.scope)... Jan 11 11:32:00 managed-node2 systemd[1]: Reloading... Jan 11 11:32:01 managed-node2 systemd-ssh-generator[31920]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:32:01 managed-node2 systemd-rc-local-generator[31918]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:32:01 managed-node2 (sd-exec-[31893]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:32:01 managed-node2 systemd[1]: Reloading finished in 199 ms. Jan 11 11:32:01 managed-node2 python3.12[32058]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:32:01 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 2437. 
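quadlet-demo-mysql.container was installed the same way at 11:32:00 and its generated service is starting here. The unit file itself is not reproduced in the journal; judging only from the surrounding events (image quay.io/linux-system-roles/mysql:5.6, volume systemd-quadlet-demo-mysql, network systemd-quadlet-demo, and a podman healthcheck timer), a unit of roughly this shape is plausible, but every line of the embedded content below is an assumption.

    - name: Install the quadlet container unit (illustrative content only)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo-mysql.container
        owner: root
        group: "0"
        mode: "0644"
        content: |
          [Container]
          ContainerName=quadlet-demo-mysql
          Image=quay.io/linux-system-roles/mysql:5.6
          # mount point and health check below are guesses, not taken from the log
          Volume=quadlet-demo-mysql.volume:/var/lib/mysql
          Network=quadlet-demo.network
          HealthCmd=/bin/true

          [Install]
          WantedBy=multi-user.target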
Jan 11 11:32:01 managed-node2 podman[32062]: 2025-01-11 11:32:01.741714223 -0500 EST m=+0.044057478 container create 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:01 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:01 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:01 managed-node2 kernel: veth0: entered allmulticast mode Jan 11 11:32:01 managed-node2 kernel: veth0: entered promiscuous mode Jan 11 11:32:01 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:01 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7742] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 11 11:32:01 managed-node2 (udev-worker)[32071]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7773] device (veth0): carrier: link connected Jan 11 11:32:01 managed-node2 (udev-worker)[32072]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7807] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7811] device (podman2): carrier: link connected Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7961] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7988] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7997] device (podman2): Activation: starting connection 'podman2' (23b26a8f-10cc-4b00-9b2c-ec9a1e9d7889) Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8002] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8026] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8029] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8032] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2523. Jan 11 11:32:01 managed-node2 podman[32062]: 2025-01-11 11:32:01.724593778 -0500 EST m=+0.026937162 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 11 11:32:01 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2523. Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8426] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8431] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8439] device (podman2): Activation: successful, device activated. Jan 11 11:32:01 managed-node2 systemd[1]: Started run-p32111-i32411.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p32111-i32411.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p32111-i32411.scope has finished successfully. ░░ ░░ The job identifier is 2602. Jan 11 11:32:01 managed-node2 systemd[1]: Started 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer - [systemd-run] /usr/bin/podman healthcheck run 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a. ░░ Subject: A start job for unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has finished successfully. ░░ ░░ The job identifier is 2608. Jan 11 11:32:01 managed-node2 podman[32062]: 2025-01-11 11:32:01.940343178 -0500 EST m=+0.242686505 container init 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:01 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2437. Jan 11 11:32:01 managed-node2 podman[32062]: 2025-01-11 11:32:01.967641712 -0500 EST m=+0.269985061 container start 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:01 managed-node2 quadlet-demo-mysql[32062]: 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a Jan 11 11:32:02 managed-node2 systemd[1]: 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. 
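The transient healthcheck unit exits with status 125 here and, as the next entries show, quadlet-demo-mysql.service itself then fails with status 1 and the container is removed; the journal excerpt alone does not say why the database container died. The usual next step on the node is to look at the unit journal and podman's own view of the container, for example with plain diagnostic commands (wrapped below as ad-hoc tasks to stay in the playbook idiom; this is not something the test run executes):

    - name: Collect failure details for quadlet-demo-mysql.service (diagnostic sketch)
      ansible.builtin.command: "{{ item }}"
      loop:
        - systemctl status quadlet-demo-mysql.service --no-pager
        - journalctl -u quadlet-demo-mysql.service -n 100 --no-pager
        - podman ps -a --filter name=quadlet-demo-mysql
      register: quadlet_mysql_diag
      changed_when: false
      failed_when: false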
Jan 11 11:32:02 managed-node2 systemd[1]: 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:32:02 managed-node2 systemd[1]: 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 systemd[1]: Stopped 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer - [systemd-run] /usr/bin/podman healthcheck run 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a. ░░ Subject: A stop job for unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has finished. ░░ ░░ The job identifier is 2764 and the job result is done. Jan 11 11:32:02 managed-node2 systemd[1]: run-p32111-i32411.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p32111-i32411.scope has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:02 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:32:02 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:32:02 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:02 managed-node2 NetworkManager[708]: [1736613122.0781] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:32:02 managed-node2 podman[32123]: 2025-01-11 11:32:02.141018376 -0500 EST m=+0.176764499 container remove 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:02 managed-node2 systemd[1]: quadlet-demo-mysql.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo-mysql.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:32:02 managed-node2 systemd[1]: quadlet-demo-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:32:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-4cbc04ea4b763a54c154ff943a2c395535b18c6a70bc5aa97666b471ea7f3d25-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-4cbc04ea4b763a54c154ff943a2c395535b18c6a70bc5aa97666b471ea7f3d25-merged.mount has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 systemd[1]: run-netns-netns\x2dd820430f\x2d8d6c\x2d6798\x2d05b8\x2d6d29a3fb9d17.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dd820430f\x2d8d6c\x2d6798\x2d05b8\x2d6d29a3fb9d17.mount has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 python3.12[32292]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:04 managed-node2 python3.12[32425]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:04 managed-node2 python3.12[32556]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:32:05 managed-node2 python3.12[32661]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613124.6292498-14522-101146869489364/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:06 managed-node2 python3.12[32792]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:06 managed-node2 systemd[1]: Reload requested from client PID 32793 ('systemctl') (unit session-5.scope)... Jan 11 11:32:06 managed-node2 systemd[1]: Reloading... Jan 11 11:32:06 managed-node2 systemd-rc-local-generator[32841]: /etc/rc.d/rc.local is not marked executable, skipping. 
Jan 11 11:32:06 managed-node2 systemd-ssh-generator[32843]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:32:06 managed-node2 (sd-exec-[32816]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:32:06 managed-node2 systemd[1]: Reloading finished in 208 ms. Jan 11 11:32:07 managed-node2 python3.12[32981]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:08 managed-node2 python3.12[33114]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:09 managed-node2 python3.12[33245]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:32:09 managed-node2 python3.12[33350]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736613129.091449-14710-252865355782244/.source.yml _original_basename=.42hwdbx1 follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:10 managed-node2 python3.12[33481]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:10 managed-node2 systemd[1]: Reload requested from client PID 33482 ('systemctl') (unit session-5.scope)... Jan 11 11:32:10 managed-node2 systemd[1]: Reloading... Jan 11 11:32:10 managed-node2 systemd-rc-local-generator[33523]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:32:10 managed-node2 systemd-ssh-generator[33525]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:32:10 managed-node2 (sd-exec-[33505]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:32:10 managed-node2 systemd[1]: Reloading finished in 210 ms. Jan 11 11:32:11 managed-node2 python3.12[33670]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:12 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
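envoy-proxy-configmap.yml (11:32:05) and quadlet-demo.yml (11:32:09) are plain Kubernetes YAML payloads dropped into the same /etc/containers/systemd directory; quadlet does not generate units from .yml files, so they are there to be referenced by the .kube unit installed a little later. Reconstructed from the Invoked lines (the payload contents themselves are not logged):

    - name: Install the Kubernetes YAML payloads used by the kube unit
      ansible.builtin.copy:
        src: "{{ item }}"            # the source templates live in the test, not in this journal
        dest: "/etc/containers/systemd/{{ item }}"
        owner: root
        group: "0"
        mode: "0644"
      loop:
        - envoy-proxy-configmap.yml
        - quadlet-demo.yml

    - name: Reload systemd after changing files under /etc/containers/systemd
      ansible.builtin.systemd:
        daemon_reload: true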
Jan 11 11:32:12 managed-node2 python3.12[33804]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Jan 11 11:32:13 managed-node2 python3.12[33935]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:13 managed-node2 python3.12[34066]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:14 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:26 managed-node2 podman[34204]: 2025-01-11 11:32:26.627835042 -0500 EST m=+12.566683686 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 11 11:32:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:30 managed-node2 podman[34624]: 2025-01-11 11:32:30.932431571 -0500 EST m=+3.782995336 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 11 11:32:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
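The two long image pulls at 11:32:26 and 11:32:30 bring in quay.io/linux-system-roles/wordpress:4.8-apache and quay.io/linux-system-roles/envoyproxy:v1.25.0, the images the demo workload needs before quadlet-demo.service is started. The journal only records the resulting pull events, not the mechanism; pre-pulling them explicitly would look something like this sketch using the podman collection, which is not necessarily what the role does here:

    - name: Pre-pull the demo images (illustrative)
      containers.podman.podman_image:
        name: "{{ item }}"
      loop:
        - quay.io/linux-system-roles/wordpress:4.8-apache
        - quay.io/linux-system-roles/envoyproxy:v1.25.0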
Jan 11 11:32:31 managed-node2 python3.12[34886]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:31 managed-node2 python3.12[35017]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:32:32 managed-node2 python3.12[35122]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613151.519664-15332-169452422398586/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:32 managed-node2 python3.12[35253]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:32 managed-node2 systemd[1]: Reload requested from client PID 35254 ('systemctl') (unit session-5.scope)... Jan 11 11:32:32 managed-node2 systemd[1]: Reloading... Jan 11 11:32:32 managed-node2 systemd-rc-local-generator[35298]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:32:32 managed-node2 systemd-ssh-generator[35303]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:32:32 managed-node2 (sd-exec-[35276]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:32:32 managed-node2 systemd[1]: Reloading finished in 197 ms. Jan 11 11:32:33 managed-node2 python3.12[35441]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:32:33 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 2851. 
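quadlet-demo.kube, installed at 11:32:32, is the unit that ties the pieces together: starting its generated quadlet-demo.service is what queues the start job for quadlet-demo-mysql.service seen just above, presumably through a dependency on that unit. Its content is not reproduced in the journal; a kube unit of roughly this shape would fit the surrounding events, but every value below other than the file names already seen is an assumption.

    - name: Install the quadlet kube unit (illustrative content only)
      ansible.builtin.copy:
        dest: /etc/containers/systemd/quadlet-demo.kube
        owner: root
        group: "0"
        mode: "0644"
        content: |
          [Kube]
          Yaml=quadlet-demo.yml
          ConfigMap=envoy-proxy-configmap.yml
          Network=quadlet-demo.network
          # published ports are guesses, loosely matching the 8000/tcp and 9000/tcp firewall openings
          PublishPort=8000:8080
          PublishPort=9000:9901

          [Install]
          WantedBy=multi-user.target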
Jan 11 11:32:33 managed-node2 podman[35445]: 2025-01-11 11:32:33.51834238 -0500 EST m=+0.044555591 container create f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:33 managed-node2 kernel: veth0: entered allmulticast mode Jan 11 11:32:33 managed-node2 kernel: veth0: entered promiscuous mode Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5469] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5515] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 11 11:32:33 managed-node2 (udev-worker)[35456]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5567] device (veth0): carrier: link connected Jan 11 11:32:33 managed-node2 (udev-worker)[35461]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5571] device (podman2): carrier: link connected Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5741] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5796] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5803] device (podman2): Activation: starting connection 'podman2' (27c70745-7559-4d0b-b7a4-cbb0ea3d03f9) Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5817] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5819] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5821] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5825] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 podman[35445]: 2025-01-11 11:32:33.500579736 -0500 EST m=+0.026793087 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 11 11:32:33 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2853. Jan 11 11:32:33 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2853. Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.6225] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.6231] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.6241] device (podman2): Activation: successful, device activated. Jan 11 11:32:33 managed-node2 systemd[1]: Started run-p35488-i35788.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p35488-i35788.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35488-i35788.scope has finished successfully. ░░ ░░ The job identifier is 2932. Jan 11 11:32:33 managed-node2 systemd[1]: Started f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer - [systemd-run] /usr/bin/podman healthcheck run f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c. ░░ Subject: A start job for unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has finished successfully. ░░ ░░ The job identifier is 2938. Jan 11 11:32:33 managed-node2 podman[35445]: 2025-01-11 11:32:33.694651745 -0500 EST m=+0.220865194 container init f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:33 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2851. Jan 11 11:32:33 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 2766. 
Jan 11 11:32:33 managed-node2 podman[35445]: 2025-01-11 11:32:33.722297794 -0500 EST m=+0.248511112 container start f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:33 managed-node2 quadlet-demo-mysql[35445]: f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c Jan 11 11:32:33 managed-node2 systemd[1]: f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Jan 11 11:32:33 managed-node2 systemd[1]: f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:32:33 managed-node2 quadlet-demo[35507]: Pods stopped: Jan 11 11:32:33 managed-node2 quadlet-demo[35507]: Pods removed: Jan 11 11:32:33 managed-node2 quadlet-demo[35507]: Secrets removed: Jan 11 11:32:33 managed-node2 quadlet-demo[35507]: Volumes removed: Jan 11 11:32:33 managed-node2 systemd[1]: f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has successfully entered the 'dead' state. Jan 11 11:32:33 managed-node2 systemd[1]: Stopped f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer - [systemd-run] /usr/bin/podman healthcheck run f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c. ░░ Subject: A stop job for unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has finished. ░░ ░░ The job identifier is 3094 and the job result is done. Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:33 managed-node2 systemd[1]: run-p35488-i35788.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35488-i35788.scope has successfully entered the 'dead' state. 
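The f23703c6…-4ddffa08d16864da.service/.timer pair above is the transient unit pair podman creates to run the container's healthcheck. Its exit status 125 generally means podman itself returned an error rather than the check command failing (plausibly because the container was already being torn down, given what follows), but the journal excerpt does not show the underlying message. While a container still exists, the configured check can be inspected and exercised by hand, for example:

    # Show the healthcheck podman recorded for the container (field name as in podman inspect JSON)
    podman inspect --format '{{json .Config.Healthcheck}}' quadlet-demo-mysql
    # Run that check once manually, the same way the timer-driven service does
    podman healthcheck run quadlet-demo-mysql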
Jan 11 11:32:33 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:32:33 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.822809762 -0500 EST m=+0.087086499 volume create wp-pv-claim Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.8407] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.863234275 -0500 EST m=+0.127510628 container create 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.8704762 -0500 EST m=+0.134752517 volume create envoy-certificates Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.875665706 -0500 EST m=+0.139941907 volume create envoy-proxy-config Jan 11 11:32:33 managed-node2 systemd[1]: run-netns-netns\x2dd1335f28\x2d90ee\x2d35c9\x2dbe00\x2d6395b68083bb.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dd1335f28\x2d90ee\x2d35c9\x2dbe00\x2d6395b68083bb.mount has successfully entered the 'dead' state. Jan 11 11:32:33 managed-node2 systemd[1]: Created slice machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice - cgroup machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice. ░░ Subject: A start job for unit machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice has finished successfully. ░░ ░░ The job identifier is 3096. Jan 11 11:32:33 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:32:33 managed-node2 podman[35506]: 2025-01-11 11:32:33.924485995 -0500 EST m=+0.203788737 container remove f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:33 managed-node2 systemd[1]: quadlet-demo-mysql.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo-mysql.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. 
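quadlet-demo-mysql.service has now failed: the container's main process exited with status 1 and the container was removed, but the MySQL error itself is not part of this excerpt. On a live reproduction the usual places to look would be (names from the log; podman logs only works while the container has not yet been removed):

    # Service-level view of the failure
    systemctl status quadlet-demo-mysql.service
    journalctl -u quadlet-demo-mysql.service -e --no-pager
    # Container stdout/stderr, if the container still exists
    podman logs quadlet-demo-mysql
    # Recent podman events for this container
    podman events --since 10m --filter container=quadlet-demo-mysql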
Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.952835432 -0500 EST m=+0.217111792 container create dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.959026784 -0500 EST m=+0.223303095 pod create 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 (image=, name=quadlet-demo) Jan 11 11:32:33 managed-node2 systemd[1]: quadlet-demo-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.971926754 -0500 EST m=+0.236203312 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.001880823 -0500 EST m=+0.266157145 container create 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.025570871 -0500 EST m=+0.289847097 container create bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.025879964 -0500 EST m=+0.290156194 container restart 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 systemd[1]: Started libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope - libcrun container. ░░ Subject: A start job for unit libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope has finished successfully. ░░ ░░ The job identifier is 3102. 
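quadlet-demo is a Kube-style quadlet, so podman first creates the pod 5525b1d56f27… with an infra container (5525b1d56f27-infra) plus the a96f3a51b8d1-service pause container, and only then creates the wordpress and envoy containers inside that pod, where they share the infra container's namespaces. While the pod exists, that relationship can be confirmed with something like (the --format field name follows the podman pod inspect JSON):

    # Pod, infra container ID, and per-container status
    podman pod ps --ctr-ids --ctr-names --ctr-status
    podman pod inspect quadlet-demo --format '{{.InfraContainerID}}'
    # Containers grouped by the pod they belong to
    podman ps -a --pod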
Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.005676748 -0500 EST m=+0.269953067 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.089484908 -0500 EST m=+0.353761227 container init 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.092062096 -0500 EST m=+0.356338482 container start 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 systemd[1]: libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope has successfully entered the 'dead' state. Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1059] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/7) Jan 11 11:32:34 managed-node2 (udev-worker)[35480]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:34 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:34 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:34 managed-node2 kernel: veth0: entered allmulticast mode Jan 11 11:32:34 managed-node2 kernel: veth0: entered promiscuous mode Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1209] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 11 11:32:34 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:34 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1260] device (veth0): carrier: link connected Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1264] device (podman2): carrier: link connected Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1357] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1372] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1389] device (podman2): Activation: starting connection 'podman2' (134e0605-059a-48c0-82b6-94816cad0d2e) Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1391] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1398] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1400] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1439] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: 
[1736613154.1521] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1524] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1533] device (podman2): Activation: successful, device activated. Jan 11 11:32:34 managed-node2 podman[35557]: 2025-01-11 11:32:34.183445487 -0500 EST m=+0.072742073 container died 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 systemd[1]: Started run-p35583-i35883.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p35583-i35883.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35583-i35883.scope has finished successfully. ░░ ░░ The job identifier is 3108. Jan 11 11:32:34 managed-node2 systemd[1]: Started libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope - libcrun container. ░░ Subject: A start job for unit libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope has finished successfully. ░░ ░░ The job identifier is 3114. Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.432622644 -0500 EST m=+0.696899007 container init dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.435661679 -0500 EST m=+0.699937978 container start dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 11 11:32:34 managed-node2 systemd[1]: libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope has successfully entered the 'dead' state. Jan 11 11:32:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay-f696c5390a0b653138a6cbb7e05d386e6bad58005fe7e13a67ebd2d4d3cb9772-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-f696c5390a0b653138a6cbb7e05d386e6bad58005fe7e13a67ebd2d4d3cb9772-merged.mount has successfully entered the 'dead' state. 
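The podman2 bridge, veth0 pair, and aardvark-dns scope above are podman's netavark networking coming up for the pod; the run also has a named network unit, quadlet-demo-network.service, which appears in the systemd listing later in this log. The excerpt does not name the network itself, so a generic way to see what exists on the host is:

    # List podman networks (the Name column is what .network quadlets create)
    podman network ls
    # Inspect each listed network; names depend on the host
    podman network ls --format '{{.Name}}' | xargs -r -n1 podman network inspect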
Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.975381632 -0500 EST m=+1.239657886 container died dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 11 11:32:37 managed-node2 podman[35557]: 2025-01-11 11:32:37.235697756 -0500 EST m=+3.124994479 container cleanup 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:37 managed-node2 systemd[1]: run-p35583-i35883.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35583-i35883.scope has successfully entered the 'dead' state. Jan 11 11:32:37 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:32:37 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:32:37 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:37 managed-node2 NetworkManager[708]: [1736613157.2649] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:32:37 managed-node2 systemd[1]: run-netns-netns\x2d680699bb\x2daab9\x2d0c5f\x2dd1a3\x2db50c1af0808c.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d680699bb\x2daab9\x2d0c5f\x2dd1a3\x2db50c1af0808c.mount has successfully entered the 'dead' state. Jan 11 11:32:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:32:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0f36b4c12f263560636bfed166bcdfec6d2a4d903ee8b4175f38e620256ba1cd-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-0f36b4c12f263560636bfed166bcdfec6d2a4d903ee8b4175f38e620256ba1cd-merged.mount has successfully entered the 'dead' state. Jan 11 11:32:37 managed-node2 podman[35592]: 2025-01-11 11:32:37.3318675 -0500 EST m=+2.694111916 container cleanup dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 11 11:32:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-8e11689c21ad0fadb74f350e962e8358d7ff4a808f195ba3097c1ccf92a7cabb-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-8e11689c21ad0fadb74f350e962e8358d7ff4a808f195ba3097c1ccf92a7cabb-merged.mount has successfully entered the 'dead' state. 
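The infra container dac097e4… died about a second after it started and podman is now unmounting its network namespace and overlay layers; the excerpt does not say why it stopped. A post-mortem on the host would normally combine the service journal with podman's event stream, roughly:

    # Everything quadlet-demo.service logged around the failure
    journalctl -u quadlet-demo.service -e --no-pager
    # Time-ordered podman events for the same window
    podman events --since 10m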
Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: [starting container 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped] Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: [starting container 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped starting container bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped] Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: Volumes: Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: wp-pv-claim Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: Pod: Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: Containers: Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7 Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9 Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: starting container 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: starting container bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: Error: failed to start 2 containers Jan 11 11:32:37 managed-node2 systemd[1]: quadlet-demo.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Jan 11 11:32:37 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 2766. Jan 11 11:32:37 managed-node2 systemd[1]: quadlet-demo.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.438028591 -0500 EST m=+0.037306380 pod stop 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 (image=, name=quadlet-demo) Jan 11 11:32:37 managed-node2 systemd[1]: Removed slice machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice - cgroup machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice. 
░░ Subject: A stop job for unit machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice has finished. ░░ ░░ The job identifier is 3121 and the job result is done. Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: time="2025-01-11T11:32:37-05:00" level=error msg="Checking whether service of container 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea can be stopped: getting exit code of container 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7 from DB: no such exit code" Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.487178676 -0500 EST m=+0.086456103 container remove 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.502328852 -0500 EST m=+0.101606262 container remove bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.523333084 -0500 EST m=+0.122610503 container remove dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.531503391 -0500 EST m=+0.130780769 pod remove 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 (image=, name=quadlet-demo) Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.551667661 -0500 EST m=+0.150945073 container remove 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: Pods stopped: Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: Pods removed: Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: Secrets removed: Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: Volumes removed: Jan 11 11:32:37 managed-node2 systemd[1]: quadlet-demo.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo.service has entered the 'failed' state with result 'exit-code'. 
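The two 'cannot get namespace path unless container … is running' errors are the direct cause of the failure recorded above: the wordpress and envoy containers join the pod infra container's namespaces, so once that infra container had stopped they could no longer be started, podman reported 'failed to start 2 containers' and exited 125, and quadlet-demo.service tore the pod back down and entered the failed state. The failed units stay visible to systemd until they are reset, so a short follow-up on the host might look like this (unit names from the log):

    # What systemd recorded for the two failed services
    systemctl status quadlet-demo.service quadlet-demo-mysql.service
    # Every quadlet-generated unit and its current state
    systemctl list-units --all 'quadlet-*'
    # Clear the failed state once the cause is understood
    systemctl reset-failed quadlet-demo.service quadlet-demo-mysql.service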
Jan 11 11:32:37 managed-node2 python3.12[35760]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:38 managed-node2 python3.12[35892]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:38 managed-node2 python3.12[36030]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:39 managed-node2 python3.12[36169]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:39 managed-node2 systemd[4339]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 11 11:32:39 managed-node2 systemd[4339]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 11 11:32:39 managed-node2 systemd[4339]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. 
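The ansible-ansible.legacy.command entries above are the test's own post-mortem checks (ls of /etc/containers/systemd, podman ps -a, podman volume ls, podman pod ps), and the ansible-get_url calls that follow keep polling https://localhost:8000 for the demo application. Run by hand, an equivalent probe would look roughly like this; -k mirrors validate_certs=False, and the URL and output path are the ones in the log:

    # Poll the demo endpoint the way the test does (insecure TLS, save the body to /run/out)
    curl -sk https://localhost:8000 -o /run/out && echo 'endpoint answered' || echo 'endpoint not answering'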
Jan 11 11:32:39 managed-node2 python3.12[36310]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:40 managed-node2 python3.12[36444]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:45 managed-node2 python3.12[36575]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:47 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 11 11:32:51 managed-node2 python3.12[36707]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:56 managed-node2 python3.12[36838]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:01 managed-node2 python3.12[36969]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:07 managed-node2 python3.12[37100]: ansible-get_url Invoked with url=https://localhost:8000 
dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:12 managed-node2 python3.12[37231]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:13 managed-node2 python3.12[37362]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None TASK [Check] ******************************************************************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:148 Saturday 11 January 2025 11:33:13 -0500 (0:00:00.458) 0:02:00.041 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "-a" ], "delta": "0:00:00.036923", "end": "2025-01-11 11:33:13.645444", "rc": 0, "start": "2025-01-11 11:33:13.608521" } STDOUT: CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES TASK [Check pods] ************************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:152 Saturday 11 January 2025 11:33:13 -0500 (0:00:00.423) 0:02:00.464 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "pod", "ps", "--ctr-ids", "--ctr-names", "--ctr-status" ], "delta": "0:00:00.033117", "end": "2025-01-11 11:33:14.047317", "failed_when_result": false, "rc": 0, "start": "2025-01-11 11:33:14.014200" } STDOUT: POD ID NAME STATUS CREATED INFRA ID IDS NAMES STATUS TASK [Check systemd] *********************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:157 Saturday 11 January 2025 11:33:14 -0500 (0:00:00.398) 0:02:00.863 ****** ok: [managed-node2] => { "changed": false, "cmd": "set -euo pipefail; systemctl list-units --all | grep quadlet", "delta": "0:00:00.013792", "end": "2025-01-11 11:33:14.425777", "failed_when_result": false, "rc": 0, "start": "2025-01-11 11:33:14.411985" } STDOUT: quadlet-demo-mysql-volume.service loaded active exited quadlet-demo-mysql-volume.service ● quadlet-demo-mysql.service loaded failed failed quadlet-demo-mysql.service quadlet-demo-network.service loaded active exited quadlet-demo-network.service ● quadlet-demo.service loaded failed failed quadlet-demo.service TASK [LS] ********************************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:165 Saturday 11 January 2025 11:33:14 -0500 (0:00:00.379) 0:02:01.242 ****** ok: 
[managed-node2] => { "changed": false, "cmd": [ "ls", "-alrtF", "/etc/systemd/system" ], "delta": "0:00:00.004003", "end": "2025-01-11 11:33:14.797409", "failed_when_result": false, "rc": 0, "start": "2025-01-11 11:33:14.793406" } STDOUT: total 12 drwxr-xr-x. 5 root root 47 Jan 9 09:16 ../ drwxr-xr-x. 2 root root 32 Jan 9 09:16 getty.target.wants/ lrwxrwxrwx. 1 root root 43 Jan 9 09:16 dbus.service -> /usr/lib/systemd/system/dbus-broker.service lrwxrwxrwx. 1 root root 37 Jan 9 09:16 ctrl-alt-del.target -> /usr/lib/systemd/system/reboot.target drwxr-xr-x. 2 root root 48 Jan 9 09:17 network-online.target.wants/ lrwxrwxrwx. 1 root root 57 Jan 9 09:17 dbus-org.freedesktop.nm-dispatcher.service -> /usr/lib/systemd/system/NetworkManager-dispatcher.service drwxr-xr-x. 2 root root 76 Jan 9 09:17 timers.target.wants/ drwxr-xr-x. 2 root root 38 Jan 9 09:17 dev-virtio\x2dports-org.qemu.guest_agent.0.device.wants/ lrwxrwxrwx. 1 root root 41 Jan 9 09:20 default.target -> /usr/lib/systemd/system/multi-user.target drwxr-xr-x. 2 root root 31 Jan 9 09:24 remote-fs.target.wants/ drwxr-xr-x. 2 root root 119 Jan 9 09:25 cloud-init.target.wants/ drwxr-xr-x. 2 root root 4096 Jan 9 09:25 sysinit.target.wants/ drwxr-xr-x. 2 root root 113 Jan 11 11:29 sockets.target.wants/ lrwxrwxrwx. 1 root root 41 Jan 11 11:29 dbus-org.fedoraproject.FirewallD1.service -> /usr/lib/systemd/system/firewalld.service drwxr-xr-x. 11 root root 4096 Jan 11 11:29 ./ drwxr-xr-x. 2 root root 4096 Jan 11 11:31 multi-user.target.wants/ TASK [Cleanup] ***************************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:172 Saturday 11 January 2025 11:33:14 -0500 (0:00:00.369) 0:02:01.611 ****** included: fedora.linux_system_roles.podman for managed-node2 TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:3 Saturday 11 January 2025 11:33:14 -0500 (0:00:00.075) 0:02:01.687 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure ansible_facts used by role] **** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:3 Saturday 11 January 2025 11:33:14 -0500 (0:00:00.050) 0:02:01.737 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check if system is ostree] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:11 Saturday 11 January 2025 11:33:15 -0500 (0:00:00.038) 0:02:01.775 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag to indicate system is ostree] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:16 Saturday 11 January 2025 11:33:15 -0500 (0:00:00.032) 0:02:01.807 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:23 Saturday 11 January 2025 11:33:15 -0500 (0:00:00.030) 0:02:01.838 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set flag if transactional-update exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:28 Saturday 11 January 2025 11:33:15 -0500 (0:00:00.096) 0:02:01.935 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set platform/version specific variables] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/set_vars.yml:32 Saturday 11 January 2025 11:33:15 -0500 (0:00:00.052) 0:02:01.987 ****** ok: [managed-node2] => (item=RedHat.yml) => { "ansible_facts": { "__podman_packages": [ "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/RedHat.yml" ], "ansible_loop_var": "item", "changed": false, "item": "RedHat.yml" } skipping: [managed-node2] => (item=CentOS.yml) => { "ansible_loop_var": "item", "changed": false, "false_condition": "__vars_file is file", "item": "CentOS.yml", "skip_reason": "Conditional result was False" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } ok: [managed-node2] => (item=CentOS_10.yml) => { "ansible_facts": { "__podman_packages": [ "iptables-nft", "podman", "shadow-utils-subid" ] }, "ansible_included_var_files": [ "/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/vars/CentOS_10.yml" ], "ansible_loop_var": "item", "changed": false, "item": "CentOS_10.yml" } TASK [fedora.linux_system_roles.podman : Gather the package facts] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6 Saturday 11 January 2025 11:33:15 -0500 (0:00:00.102) 0:02:02.090 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Enable copr if requested] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:10 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.929) 0:02:03.019 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_use_copr | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Ensure required packages are installed] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:14 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.036) 0:02:03.056 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "(__podman_packages | 
difference(ansible_facts.packages))", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:28 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.043) 0:02:03.099 ****** skipping: [managed-node2] => { "false_condition": "__podman_is_transactional | d(false)" } TASK [fedora.linux_system_roles.podman : Reboot transactional update systems] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:33 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.034) 0:02:03.134 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if reboot is needed and not set] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:38 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.035) 0:02:03.169 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get podman version] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:46 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.031) 0:02:03.201 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "--version" ], "delta": "0:00:00.024168", "end": "2025-01-11 11:33:16.784576", "rc": 0, "start": "2025-01-11 11:33:16.760408" } STDOUT: podman version 5.3.1 TASK [fedora.linux_system_roles.podman : Set podman version] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:52 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.406) 0:02:03.607 ****** ok: [managed-node2] => { "ansible_facts": { "podman_version": "5.3.1" }, "changed": false } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.2 or later] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:56 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.035) 0:02:03.643 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.2\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:63 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.032) 0:02:03.675 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_version is version(\"4.4\", \"<\")", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 4.4 or later for quadlet, secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:73 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.036) 0:02:03.712 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host 
conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:80 Saturday 11 January 2025 11:33:16 -0500 (0:00:00.036) 0:02:03.748 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__has_type_pod or __has_pod_file_ext or __has_pod_file_src_ext or __has_pod_template_src_ext or __has_pod_template_src_ext_j2", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Podman package version must be 5.0 or later for Pod quadlets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:96 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.053) 0:02:03.802 ****** META: end_host conditional evaluated to False, continuing execution for managed-node2 skipping: [managed-node2] => { "skip_reason": "end_host conditional evaluated to False, continuing execution for managed-node2" } MSG: end_host conditional evaluated to false, continuing execution for managed-node2 TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:109 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.049) 0:02:03.851 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.122) 0:02:03.973 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.048) 0:02:04.021 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.039) 0:02:04.061 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.055) 0:02:04.116 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": 
false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.406) 0:02:04.523 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.041) 0:02:04.564 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.039) 0:02:04.604 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.039) 0:02:04.644 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.035) 0:02:04.679 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.030) 0:02:04.710 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:17 -0500 (0:00:00.031) 0:02:04.742 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK 
[fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.031) 0:02:04.773 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set config file paths] **************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:115 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.031) 0:02:04.805 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_container_conf_file": "/etc/containers/containers.conf.d/50-systemroles.conf", "__podman_policy_json_file": "/etc/containers/policy.json", "__podman_registries_conf_file": "/etc/containers/registries.conf.d/50-systemroles.conf", "__podman_storage_conf_file": "/etc/containers/storage.conf" }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle container.conf.d] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:124 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.038) 0:02:04.843 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure containers.d exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:5 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.059) 0:02:04.903 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update container config file] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_container_conf_d.yml:13 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.040) 0:02:04.943 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_containers_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle registries.conf.d] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:127 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.047) 0:02:04.991 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure registries.d exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:5 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.064) 0:02:05.055 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update registries config file] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_registries_conf_d.yml:13 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.034) 0:02:05.090 ****** skipping: [managed-node2] => { "changed": false, "false_condition": 
"podman_registries_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle storage.conf] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:130 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.070) 0:02:05.161 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure storage.conf parent dir exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:5 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.059) 0:02:05.220 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Update storage config file] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_storage_conf.yml:13 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.031) 0:02:05.251 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_storage_conf | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Handle policy.json] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:133 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.030) 0:02:05.282 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Ensure policy.json parent dir exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:6 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.102) 0:02:05.384 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat the policy.json file] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:14 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.048) 0:02:05.433 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get the existing policy.json] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:19 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.036) 0:02:05.470 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Write new policy.json file] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_policy_json.yml:25 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.033) 0:02:05.503 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_policy_json | length > 0", "skip_reason": "Conditional result was False" } TASK [Manage firewall for specified ports] ************************************* task 
path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:139 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.036) 0:02:05.539 ****** included: fedora.linux_system_roles.firewall for managed-node2 TASK [fedora.linux_system_roles.firewall : Setup firewalld] ******************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:2 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.108) 0:02:05.648 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml for managed-node2 TASK [fedora.linux_system_roles.firewall : Ensure ansible_facts used by role] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:2 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.053) 0:02:05.702 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_required_facts | difference(ansible_facts.keys() | list) | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if system is ostree] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:10 Saturday 11 January 2025 11:33:18 -0500 (0:00:00.037) 0:02:05.739 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag to indicate system is ostree] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:15 Saturday 11 January 2025 11:33:19 -0500 (0:00:00.030) 0:02:05.770 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_ostree is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Check if transactional-update exists in /sbin] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:22 Saturday 11 January 2025 11:33:19 -0500 (0:00:00.032) 0:02:05.802 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Set flag if transactional-update exists] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:27 Saturday 11 January 2025 11:33:19 -0500 (0:00:00.030) 0:02:05.833 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __firewall_is_transactional is defined", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Install firewalld] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:31 Saturday 11 January 2025 11:33:19 -0500 (0:00:00.030) 0:02:05.863 ****** ok: [managed-node2] => { "changed": false, "rc": 0, "results": [] } MSG: Nothing to do lsrpackages: firewalld TASK [fedora.linux_system_roles.firewall : Notify user that reboot is needed to apply changes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:43 Saturday 11 January 2025 11:33:19 -0500 (0:00:00.790) 0:02:06.654 ****** skipping: [managed-node2] => { 
"false_condition": "__firewall_is_transactional | d(false)" } TASK [fedora.linux_system_roles.firewall : Reboot transactional update systems] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:48 Saturday 11 January 2025 11:33:20 -0500 (0:00:00.105) 0:02:06.760 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Fail if reboot is needed and not set] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/firewalld.yml:53 Saturday 11 January 2025 11:33:20 -0500 (0:00:00.035) 0:02:06.796 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_is_transactional | d(false)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Collect service facts] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:5 Saturday 11 January 2025 11:33:20 -0500 (0:00:00.033) 0:02:06.830 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Attempt to stop and disable conflicting services] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:9 Saturday 11 January 2025 11:33:20 -0500 (0:00:00.032) 0:02:06.863 ****** skipping: [managed-node2] => (item=nftables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "nftables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=iptables) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "iptables", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item=ufw) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall_disable_conflicting_services | bool", "item": "ufw", "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Unmask firewalld service] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:22 Saturday 11 January 2025 11:33:20 -0500 (0:00:00.044) 0:02:06.908 ****** ok: [managed-node2] => { "changed": false, "name": "firewalld", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-11 11:29:10 EST", "ActiveEnterTimestampMonotonic": "271605433", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service dbus.socket basic.target dbus-broker.service sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-11 11:29:10 EST", "AssertTimestampMonotonic": "271333792", "Before": "shutdown.target network-pre.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", 
"CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "645628000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-11 11:29:10 EST", "ConditionTimestampMonotonic": "271333789", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ebtables.service shutdown.target ipset.service ip6tables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4516", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-11 11:29:10 EST", "ExecMainHandoffTimestampMonotonic": "271363582", "ExecMainPID": "10862", "ExecMainStartTimestamp": "Sat 2025-01-11 11:29:10 EST", "ExecMainStartTimestampMonotonic": "271336631", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": 
"[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-11 11:29:10 EST", "InactiveExitTimestampMonotonic": "271337679", "InvocationID": "f2f268445be048bc90d6f558b31f85a8", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10862", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3108925440", "MemoryCurrent": "35250176", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35942400", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": 
"yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-11 11:32:32 EST", "StateChangeTimestampMonotonic": "474102680", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Enable and start firewalld service] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:28 Saturday 11 January 2025 11:33:20 -0500 (0:00:00.589) 0:02:07.497 ****** ok: [managed-node2] => { "changed": false, "enabled": true, "name": "firewalld", "state": "started", "status": { "AccessSELinuxContext": "system_u:object_r:firewalld_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-11 11:29:10 EST", "ActiveEnterTimestampMonotonic": "271605433", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "polkit.service dbus.socket basic.target dbus-broker.service sysinit.target system.slice", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-11 11:29:10 EST", "AssertTimestampMonotonic": "271333792", "Before": "shutdown.target network-pre.target multi-user.target", "BindLogSockets": "no", 
"BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "BusName": "org.fedoraproject.FirewallD1", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "645628000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "yes", "CanReload": "yes", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_tty_config cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-11 11:29:10 EST", "ConditionTimestampMonotonic": "271333789", "ConfigurationDirectoryMode": "0755", "Conflicts": "iptables.service ebtables.service shutdown.target ipset.service ip6tables.service", "ControlGroup": "/system.slice/firewalld.service", "ControlGroupId": "4516", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "firewalld - dynamic firewall daemon", "DeviceAllow": "char-rtc r", "DevicePolicy": "closed", "Documentation": "\"man:firewalld(1)\"", "DynamicUser": "no", "EffectiveCPUs": "0-1", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveMemoryNodes": "0", "EffectiveTasksMax": "22365", "EnvironmentFiles": "/etc/sysconfig/firewalld (ignore_errors=yes)", "ExecMainCode": "0", "ExecMainExitTimestampMonotonic": "0", "ExecMainHandoffTimestamp": "Sat 2025-01-11 11:29:10 EST", "ExecMainHandoffTimestampMonotonic": "271363582", "ExecMainPID": "10862", "ExecMainStartTimestamp": "Sat 2025-01-11 11:29:10 EST", "ExecMainStartTimestampMonotonic": "271336631", "ExecMainStatus": "0", "ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecReloadEx": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStart": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/sbin/firewalld ; argv[]=/usr/sbin/firewalld --nofork --nopid $FIREWALLD_ARGS ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": 
"/usr/lib/systemd/system/firewalld.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "firewalld.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-11 11:29:10 EST", "InactiveExitTimestampMonotonic": "271337679", "InvocationID": "f2f268445be048bc90d6f558b31f85a8", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "yes", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "10862", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3109974016", "MemoryCurrent": "35250176", "MemoryDenyWriteExecute": "yes", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "35942400", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "0", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "0", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "firewalld.service dbus-org.fedoraproject.FirewallD1.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "yes", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", 
"ProtectClock": "yes", "ProtectControlGroups": "yes", "ProtectControlGroupsEx": "yes", "ProtectHome": "yes", "ProtectHostname": "yes", "ProtectKernelLogs": "yes", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "yes", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "dbus.socket sysinit.target system.slice", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "yes", "RestrictSUIDSGID": "yes", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "StandardError": "null", "StandardInput": "null", "StandardOutput": "null", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-11 11:32:32 EST", "StateChangeTimestampMonotonic": "474102680", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "running", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallArchitectures": "native", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "2", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "dbus", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "enabled", "UnitFileState": "enabled", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-pre.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.firewall : Check if previous replaced is defined] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:34 Saturday 11 January 2025 11:33:21 -0500 (0:00:00.562) 0:02:08.060 ****** ok: [managed-node2] => { "ansible_facts": { "__firewall_previous_replaced": false, "__firewall_python_cmd": "/usr/bin/python3.12", "__firewall_report_changed": true }, "changed": false } TASK [fedora.linux_system_roles.firewall : Get config files, checksums before and remove] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:43 Saturday 11 
January 2025 11:33:21 -0500 (0:00:00.039) 0:02:08.099 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Tell firewall module it is able to report changed] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:55 Saturday 11 January 2025 11:33:21 -0500 (0:00:00.029) 0:02:08.129 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Configure firewall] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71 Saturday 11 January 2025 11:33:21 -0500 (0:00:00.028) 0:02:08.157 ****** ok: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "8000/tcp", "state": "enabled" } } ok: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "__firewall_changed": false, "ansible_loop_var": "item", "changed": false, "item": { "port": "9000/tcp", "state": "enabled" } } TASK [fedora.linux_system_roles.firewall : Gather firewall config information] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:120 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.997) 0:02:09.155 ****** skipping: [managed-node2] => (item={'port': '8000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "8000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => (item={'port': '9000/tcp', 'state': 'enabled'}) => { "ansible_loop_var": "item", "changed": false, "false_condition": "firewall | length == 1", "item": { "port": "9000/tcp", "state": "enabled" }, "skip_reason": "Conditional result was False" } skipping: [managed-node2] => { "changed": false } MSG: All items skipped TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:130 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.083) 0:02:09.239 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall | length == 1", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Gather firewall config if no arguments] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:139 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.055) 0:02:09.294 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Update firewalld_config fact] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:144 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.059) 0:02:09.354 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "firewall == None or firewall | length == 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Get config files, checksums 
after] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:153 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.057) 0:02:09.411 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Calculate what has changed] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:163 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.052) 0:02:09.464 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__firewall_previous_replaced | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.firewall : Show diffs] ************************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:169 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.053) 0:02:09.518 ****** skipping: [managed-node2] => { "false_condition": "__firewall_previous_replaced | bool" } TASK [Manage selinux for specified ports] ************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:146 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.109) 0:02:09.627 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "podman_selinux_ports | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Keep track of users that need to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:153 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.049) 0:02:09.677 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_cancel_user_linger": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - present] ******* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:157 Saturday 11 January 2025 11:33:22 -0500 (0:00:00.050) 0:02:09.728 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle credential files - present] **** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:166 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.049) 0:02:09.777 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle secrets] *********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:175 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.115) 0:02:09.892 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml for managed-node2 => (item=(censored due to no_log)) TASK 
[fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.115) 0:02:10.007 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.040) 0:02:10.048 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.054) 0:02:10.102 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.034) 0:02:10.137 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.034) 0:02:10.171 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.040) 0:02:10.212 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.033) 0:02:10.246 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.047) 0:02:10.293 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.051) 0:02:10.344 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.050) 0:02:10.395 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.053) 0:02:10.448 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.036) 0:02:10.485 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.036) 0:02:10.522 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.035) 0:02:10.558 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.035) 0:02:10.593 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.059) 0:02:10.652 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:33:23 -0500 (0:00:00.054) 0:02:10.707 ****** 
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.065) 0:02:10.773 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.033) 0:02:10.807 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.049) 0:02:10.856 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.050) 0:02:10.907 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.472) 0:02:11.380 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.055) 0:02:11.436 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.091) 0:02:11.528 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.053) 0:02:11.581 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result 
was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:24 -0500 (0:00:00.057) 0:02:11.639 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.115) 0:02:11.754 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.050) 0:02:11.804 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.054) 0:02:11.858 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.053) 0:02:11.912 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.054) 0:02:11.967 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.051) 0:02:12.018 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.052) 0:02:12.071 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.050) 0:02:12.121 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.051) 0:02:12.173 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.051) 0:02:12.225 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.069) 0:02:12.294 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.162) 0:02:12.457 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.049) 0:02:12.506 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.050) 0:02:12.556 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.050) 0:02:12.607 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 11 January 2025 11:33:25 -0500 (0:00:00.051) 0:02:12.659 ****** changed: [managed-node2] => { 
"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Set variables part 1] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:3 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.484) 0:02:13.143 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:7 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.058) 0:02:13.201 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.093) 0:02:13.295 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.053) 0:02:13.348 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.054) 0:02:13.403 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.071) 0:02:13.474 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.048) 0:02:13.523 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.052) 0:02:13.576 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : 
Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.053) 0:02:13.629 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.051) 0:02:13.681 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:26 -0500 (0:00:00.052) 0:02:13.734 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.053) 0:02:13.787 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.060) 0:02:13.847 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.067) 0:02:13.915 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_check_subids | d(true)", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set variables part 2] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:14 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.051) 0:02:13.966 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_rootless": false, "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:20 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.064) 0:02:14.031 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 
2025 11:33:27 -0500 (0:00:00.164) 0:02:14.195 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.049) 0:02:14.244 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.043) 0:02:14.288 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:25 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.033) 0:02:14.322 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Manage each secret] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_secret.yml:41 Saturday 11 January 2025 11:33:27 -0500 (0:00:00.034) 0:02:14.356 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Handle Kubernetes specifications] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:182 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.441) 0:02:14.797 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle Quadlet specifications] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:189 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.045) 0:02:14.843 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) included: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml for managed-node2 => (item=(censored due to no_log)) TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.276) 0:02:15.119 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.kube", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Unit]\nRequires=quadlet-demo-mysql.service\nAfter=quadlet-demo-mysql.service\n\n[Kube]\n# Point to the yaml file in the same directory\nYaml=quadlet-demo.yml\n# Use the quadlet-demo network\nNetwork=quadlet-demo.network\n# Publish the envoy proxy data port\nPublishPort=8000:8080\n# Publish the envoy proxy admin port\nPublishPort=9000:9901\n# Use the envoy proxy config map in the same directory\nConfigMap=envoy-proxy-configmap.yml", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.072) 0:02:15.191 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.062) 0:02:15.253 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.052) 0:02:15.306 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "kube", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.075) 0:02:15.381 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.095) 0:02:15.476 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.058) 0:02:15.535 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.059) 0:02:15.594 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:28 -0500 (0:00:00.071) 0:02:15.665 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.427) 0:02:16.093 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.092) 0:02:16.185 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.040) 0:02:16.225 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.037) 0:02:16.263 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.032) 0:02:16.296 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.032) 0:02:16.328 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.031) 0:02:16.360 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.033) 0:02:16.393 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.034) 0:02:16.427 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": [ "quadlet-demo.yml" ], "__podman_service_name": "quadlet-demo.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.055) 0:02:16.483 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.041) 0:02:16.525 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.030) 0:02:16.555 ****** ok: 
[managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.kube", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.077) 0:02:16.633 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.037) 0:02:16.671 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 11 January 2025 11:33:29 -0500 (0:00:00.072) 0:02:16.743 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 11 January 2025 11:33:30 -0500 (0:00:00.032) 0:02:16.776 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-11 11:32:37 EST", "ActiveEnterTimestampMonotonic": "478557264", "ActiveExitTimestamp": "Sat 2025-01-11 11:32:37 EST", "ActiveExitTimestampMonotonic": "478563248", "ActiveState": "failed", "After": "sysinit.target quadlet-demo-network.service system.slice basic.target -.mount systemd-journald.socket network-online.target quadlet-demo-mysql.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-11 11:32:33 EST", "AssertTimestampMonotonic": "474897514", "Before": "shutdown.target multi-user.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "487513000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override 
cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-11 11:32:33 EST", "ConditionTimestampMonotonic": "474897511", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "8567", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo.service", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-11 11:32:37 EST", "ExecMainExitTimestampMonotonic": "478561737", "ExecMainHandoffTimestamp": "Sat 2025-01-11 11:32:33 EST", "ExecMainHandoffTimestampMonotonic": "474907592", "ExecMainPID": "35507", "ExecMainStartTimestamp": "Sat 2025-01-11 11:32:33 EST", "ExecMainStartTimestampMonotonic": "474898632", "ExecMainStatus": "125", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2025-01-11 11:32:33 EST] ; stop_time=[Sat 2025-01-11 11:32:37 EST] ; pid=35507 ; code=exited ; status=125 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube play --replace --service-container=true --network systemd-quadlet-demo --configmap /etc/containers/systemd/envoy-proxy-configmap.yml --publish 8000:8080 --publish 9000:9901 /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Sat 2025-01-11 11:32:33 EST] ; stop_time=[Sat 2025-01-11 11:32:37 EST] ; pid=35507 ; code=exited ; status=125 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; ignore_errors=no ; start_time=[Sat 2025-01-11 11:32:37 EST] ; stop_time=[Sat 2025-01-11 11:32:37 EST] ; pid=35618 ; code=exited ; status=0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman kube down /etc/containers/systemd/quadlet-demo.yml ; flags= ; start_time=[Sat 2025-01-11 11:32:37 EST] ; stop_time=[Sat 2025-01-11 11:32:37 EST] ; pid=35618 ; code=exited ; status=0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo.service", 
"IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-01-11 11:32:37 EST", "InactiveEnterTimestampMonotonic": "478763663", "InactiveExitTimestamp": "Sat 2025-01-11 11:32:33 EST", "InactiveExitTimestampMonotonic": "474899454", "InvocationID": "ffc6e2f8d42d4d12b57895c06967ede4", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3123834880", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "53538816", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-network.service sysinit.target 
quadlet-demo-mysql.service -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "exit-code", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.kube", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-11 11:32:37 EST", "StateChangeTimestampMonotonic": "478763663", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "failed", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 11 January 2025 11:33:30 -0500 (0:00:00.807) 0:02:17.583 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736613152.094372, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7", "ctime": 1736613152.096372, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 432013987, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736613151.830373, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.kube", "pw_name": "root", "readable": true, "rgrp": 
true, "roth": true, "rusr": true, "size": 456, "uid": 0, "version": "1132633464", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 11 January 2025 11:33:31 -0500 (0:00:00.429) 0:02:18.013 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 11 January 2025 11:33:31 -0500 (0:00:00.072) 0:02:18.085 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 11 January 2025 11:33:31 -0500 (0:00:00.414) 0:02:18.500 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 11 January 2025 11:33:31 -0500 (0:00:00.049) 0:02:18.549 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 11 January 2025 11:33:31 -0500 (0:00:00.031) 0:02:18.581 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 11 January 2025 11:33:31 -0500 (0:00:00.030) 0:02:18.611 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.kube", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:32 -0500 (0:00:00.407) 0:02:19.019 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 11 January 2025 11:33:33 -0500 (0:00:00.756) 0:02:19.776 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
Saturday 11 January 2025 11:33:33 -0500 (0:00:00.040) 0:02:19.817 ******
skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] *********
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116
Saturday 11 January 2025 11:33:33 -0500 (0:00:00.067) 0:02:19.884 ******
ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false }

TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ********
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
Saturday 11 January 2025 11:33:33 -0500 (0:00:00.051) 0:02:19.935 ******
changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.904083", "end": "2025-01-11 11:33:34.430140", "rc": 0, "start": "2025-01-11 11:33:33.526057" }

STDOUT:
f41b26d639b72cab2f92dd19cf0a7b368c0addd834cde4dfbe6ba9cc0b5e9755
9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f
dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5
fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b
5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d

TASK [fedora.linux_system_roles.podman : Manage linger] ************************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131
Saturday 11 January 2025 11:33:34 -0500 (0:00:01.326) 0:02:21.262 ******
included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2

TASK [fedora.linux_system_roles.podman : Enable linger if needed] **************
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12
Saturday 11 January 2025 11:33:34 -0500 (0:00:00.059) 0:02:21.321 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18
Saturday 11 January 2025 11:33:34 -0500 (0:00:00.030) 0:02:21.351 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22
Saturday 11 January 2025 11:33:34 -0500 (0:00:00.028) 0:02:21.380 ******
skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - images] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141
Saturday 11 January 2025 11:33:34 -0500 (0:00:00.029) 0:02:21.410 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.035093", "end": "2025-01-11 11:33:35.009294", "rc": 0, "start": "2025-01-11 11:33:34.974201" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150
Saturday 11 January 2025 11:33:35 -0500 (0:00:00.436) 0:02:21.847 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.030835", "end": "2025-01-11 11:33:35.457497", "rc": 0, "start": "2025-01-11 11:33:35.426662" }

STDOUT:
local systemd-quadlet-demo-mysql
local wp-pv-claim
local envoy-certificates
local envoy-proxy-config

TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159
Saturday 11 January 2025 11:33:35 -0500 (0:00:00.428) 0:02:22.276 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.032667", "end": "2025-01-11 11:33:35.890190", "rc": 0, "start": "2025-01-11 11:33:35.857523" }

TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168
Saturday 11 January 2025 11:33:35 -0500 (0:00:00.430) 0:02:22.706 ******
ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.033217", "end": "2025-01-11 11:33:36.314371", "rc": 0, "start": "2025-01-11 11:33:36.281154" }

STDOUT:
podman
podman-default-kube-network
systemd-quadlet-demo

TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177
Saturday 11 January 2025 11:33:36 -0500 (0:00:00.468) 0:02:23.174 ******
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] *****
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187
Saturday 11 January 2025 11:33:36 -0500 (0:00:00.464) 0:02:23.630 ******
ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false }

TASK [fedora.linux_system_roles.podman : For testing and debugging - services] ***
task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
Saturday 11 January 2025 11:33:37 -0500 (0:00:00.464) 0:02:24.094 ******
ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": 
"auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" 
}, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": 
{ "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", 
"status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": 
"systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": 
"systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:33:39 -0500 (0:00:02.363) 0:02:26.457 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:33:39 -0500 (0:00:00.033) 0:02:26.491 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: PersistentVolumeClaim\nmetadata:\n name: wp-pv-claim\n labels:\n app: wordpress\nspec:\n accessModes:\n - ReadWriteOnce\n resources:\n requests:\n storage: 20Gi\n---\napiVersion: v1\nkind: Pod\nmetadata:\n name: quadlet-demo\nspec:\n containers:\n - name: wordpress\n image: quay.io/linux-system-roles/wordpress:4.8-apache\n env:\n - name: WORDPRESS_DB_HOST\n value: quadlet-demo-mysql\n - name: WORDPRESS_DB_PASSWORD\n valueFrom:\n secretKeyRef:\n name: mysql-root-password-kube\n key: password\n volumeMounts:\n - name: wordpress-persistent-storage\n mountPath: /var/www/html\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n - name: envoy\n image: quay.io/linux-system-roles/envoyproxy:v1.25.0\n volumeMounts:\n - name: config-volume\n mountPath: /etc/envoy\n - name: certificates\n mountPath: /etc/envoy-certificates\n env:\n - name: ENVOY_UID\n value: \"0\"\n resources:\n requests:\n memory: \"64Mi\"\n cpu: \"250m\"\n limits:\n memory: \"128Mi\"\n cpu: \"500m\"\n volumes:\n - name: config-volume\n configMap:\n name: envoy-proxy-config\n - name: certificates\n secret:\n secretName: envoy-certificates\n - name: wordpress-persistent-storage\n persistentVolumeClaim:\n claimName: wp-pv-claim\n - name: www # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3\n - name: create # not used - for testing hostpath\n hostPath:\n path: /tmp/httpd3-create\n", "__podman_quadlet_template_src": "quadlet-demo.yml.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:33:39 -0500 (0:00:00.132) 0:02:26.624 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:33:39 -0500 (0:00:00.071) 0:02:26.695 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.055) 0:02:26.751 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.063) 0:02:26.815 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.066) 0:02:26.882 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.040) 0:02:26.922 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.041) 0:02:26.964 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.053) 0:02:27.017 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.406) 0:02:27.424 ****** skipping: [managed-node2] => { "changed": false, 
"false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.043) 0:02:27.468 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.038) 0:02:27.506 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.038) 0:02:27.544 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.035) 0:02:27.580 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.035) 0:02:27.615 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.084) 0:02:27.700 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:40 -0500 (0:00:00.039) 0:02:27.739 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.036) 0:02:27.776 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], 
"__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.060) 0:02:27.836 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.036) 0:02:27.872 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.031) 0:02:27.904 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.079) 0:02:27.984 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.045) 0:02:28.029 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.120) 0:02:28.149 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.040) 0:02:28.190 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.047) 0:02:28.238 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736613132.271455, "attr_flags": 
"", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "998dccde0483b1654327a46ddd89cbaa47650370", "ctime": 1736613129.6714659, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 377487587, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736613129.402467, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1605, "uid": 0, "version": "380823053", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.390) 0:02:28.628 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 11 January 2025 11:33:41 -0500 (0:00:00.059) 0:02:28.688 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.365) 0:02:29.054 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.087) 0:02:29.141 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.047) 0:02:29.188 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.041) 0:02:29.230 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:42 -0500 (0:00:00.440) 0:02:29.670 ****** 
ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.776) 0:02:30.447 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 11 January 2025 11:33:43 -0500 (0:00:00.064) 0:02:30.511 ****** changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => (item=None) => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 11 January 2025 11:33:45 -0500 (0:00:01.264) 0:02:31.775 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 11 January 2025 11:33:45 -0500 (0:00:00.034) 0:02:31.810 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.031428", "end": "2025-01-11 11:33:45.403231", "rc": 0, "start": "2025-01-11 11:33:45.371803" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 11 January 2025 11:33:45 -0500 (0:00:00.414) 0:02:32.224 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:33:45 -0500 (0:00:00.064) 0:02:32.288 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:33:45 -0500 (0:00:00.033) 0:02:32.322 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": 
"Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:33:45 -0500 (0:00:00.041) 0:02:32.364 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 11 January 2025 11:33:45 -0500 (0:00:00.031) 0:02:32.395 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.033102", "end": "2025-01-11 11:33:45.987374", "rc": 0, "start": "2025-01-11 11:33:45.954272" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 11 January 2025 11:33:46 -0500 (0:00:00.412) 0:02:32.808 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.031760", "end": "2025-01-11 11:33:46.397672", "rc": 0, "start": "2025-01-11 11:33:46.365912" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 11 January 2025 11:33:46 -0500 (0:00:00.410) 0:02:33.219 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.032197", "end": "2025-01-11 11:33:46.810711", "rc": 0, "start": "2025-01-11 11:33:46.778514" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 11 January 2025 11:33:46 -0500 (0:00:00.410) 0:02:33.629 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.030264", "end": "2025-01-11 11:33:47.214979", "rc": 0, "start": "2025-01-11 11:33:47.184715" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 11 January 2025 11:33:47 -0500 (0:00:00.452) 0:02:34.081 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 11 January 2025 11:33:47 -0500 (0:00:00.412) 0:02:34.493 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 
Saturday 11 January 2025 11:33:48 -0500 (0:00:00.411) 0:02:34.905 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": 
"systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": 
"running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", 
"status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": 
"quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": 
"stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, 
"systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, 
"systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": 
"unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:33:50 -0500 (0:00:01.978) 0:02:36.883 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.033) 0:02:36.916 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "envoy-proxy-configmap.yml", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "---\napiVersion: v1\nkind: ConfigMap\nmetadata:\n name: envoy-proxy-config\ndata:\n envoy.yaml: |\n admin:\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 9901\n\n static_resources:\n listeners:\n - name: listener_0\n address:\n socket_address:\n address: 0.0.0.0\n port_value: 8080\n filter_chains:\n - filters:\n - name: envoy.filters.network.http_connection_manager\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.network.http_connection_manager.v3.HttpConnectionManager\n stat_prefix: ingress_http\n codec_type: AUTO\n route_config:\n name: local_route\n virtual_hosts:\n - name: local_service\n domains: [\"*\"]\n routes:\n - match:\n prefix: \"/\"\n route:\n cluster: backend\n http_filters:\n - name: envoy.filters.http.router\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.filters.http.router.v3.Router\n transport_socket:\n name: envoy.transport_sockets.tls\n typed_config:\n \"@type\": type.googleapis.com/envoy.extensions.transport_sockets.tls.v3.DownstreamTlsContext\n common_tls_context:\n tls_certificates:\n - certificate_chain:\n filename: /etc/envoy-certificates/certificate.pem\n private_key:\n filename: /etc/envoy-certificates/certificate.key\n clusters:\n - name: backend\n connect_timeout: 5s\n type: STATIC\n dns_refresh_rate: 1800s\n lb_policy: ROUND_ROBIN\n load_assignment:\n cluster_name: backend\n endpoints:\n - lb_endpoints:\n - endpoint:\n address:\n socket_address:\n address: 127.0.0.1\n port_value: 80", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.046) 0:02:36.963 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, 
"__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.043) 0:02:37.007 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.034) 0:02:37.041 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "envoy-proxy-configmap", "__podman_quadlet_type": "yml", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.050) 0:02:37.091 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.064) 0:02:37.156 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.040) 0:02:37.196 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.041) 0:02:37.237 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.048) 0:02:37.286 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": 
"application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.384) 0:02:37.671 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:33:50 -0500 (0:00:00.043) 0:02:37.715 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.084) 0:02:37.800 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.038) 0:02:37.838 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.034) 0:02:37.873 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.040) 0:02:37.914 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.053) 0:02:37.967 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.042) 0:02:38.010 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.043) 0:02:38.053 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.069) 0:02:38.122 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.036) 0:02:38.159 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.033) 0:02:38.192 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/envoy-proxy-configmap.yml", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.079) 0:02:38.271 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.041) 0:02:38.312 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.107) 0:02:38.420 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable 
service] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.071) 0:02:38.492 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_service_name | length > 0", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 11 January 2025 11:33:51 -0500 (0:00:00.057) 0:02:38.549 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736613153.8633647, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "d681c7d56f912150d041873e880818b22a90c188", "ctime": 1736613125.2404845, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 339738865, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736613124.9554856, "nlink": 1, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2102, "uid": 0, "version": "237935376", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.478) 0:02:39.028 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.071) 0:02:39.099 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.376) 0:02:39.475 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.039) 0:02:39.515 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.045) 0:02:39.561 ****** ok: 
[managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 11 January 2025 11:33:52 -0500 (0:00:00.038) 0:02:39.600 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/envoy-proxy-configmap.yml", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:53 -0500 (0:00:00.410) 0:02:40.010 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.741) 0:02:40.751 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.047) 0:02:40.799 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.049) 0:02:40.848 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.037) 0:02:40.886 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.033131", "end": "2025-01-11 11:33:54.484689", "rc": 0, "start": "2025-01-11 11:33:54.451558" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.424) 0:02:41.310 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.060) 0:02:41.371 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.038) 0:02:41.410 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.041) 0:02:41.451 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 11 January 2025 11:33:54 -0500 (0:00:00.143) 0:02:41.595 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.032338", "end": "2025-01-11 11:33:55.205918", "rc": 0, "start": "2025-01-11 11:33:55.173580" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 11 January 2025 11:33:55 -0500 (0:00:00.432) 0:02:42.027 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.033135", "end": "2025-01-11 11:33:55.620533", "rc": 0, "start": "2025-01-11 11:33:55.587398" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 11 January 2025 11:33:55 -0500 (0:00:00.423) 0:02:42.450 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.033205", "end": "2025-01-11 11:33:56.043498", "rc": 0, "start": "2025-01-11 11:33:56.010293" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 11 January 2025 11:33:56 -0500 (0:00:00.414) 0:02:42.865 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031327", "end": "2025-01-11 11:33:56.456413", "rc": 0, "start": "2025-01-11 11:33:56.425086" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 11 January 2025 11:33:56 -0500 (0:00:00.411) 0:02:43.277 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 11 January 2025 11:33:56 -0500 (0:00:00.413) 0:02:43.690 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to 
the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 11 January 2025 11:33:57 -0500 (0:00:00.414) 0:02:44.104 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", 
"state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", 
"source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": 
"qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": 
"systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": 
"systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", 
"state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": 
"systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:33:59 -0500 (0:00:01.944) 0:02:46.048 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.032) 0:02:46.081 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Install]\nWantedBy=default.target\n\n[Container]\nImage=quay.io/linux-system-roles/mysql:5.6\nContainerName=quadlet-demo-mysql\nVolume=quadlet-demo-mysql.volume:/var/lib/mysql\nVolume=/tmp/quadlet_demo:/var/lib/quadlet_demo:Z\nNetwork=quadlet-demo.network\nSecret=mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD\nHealthCmd=/bin/true\nHealthOnFailure=kill\n", "__podman_quadlet_template_src": "quadlet-demo-mysql.container.j2" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.097) 0:02:46.178 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.042) 0:02:46.221 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_str", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.035) 
0:02:46.257 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "container", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.048) 0:02:46.306 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.061) 0:02:46.367 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.092) 0:02:46.459 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.039) 0:02:46.498 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:33:59 -0500 (0:00:00.050) 0:02:46.548 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.388) 0:02:46.937 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with 
getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.037) 0:02:46.974 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.037) 0:02:47.012 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.036) 0:02:47.048 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.036) 0:02:47.085 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.038) 0:02:47.123 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.035) 0:02:47.159 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.035) 0:02:47.195 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.035) 0:02:47.230 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql.service", "__podman_systemd_scope": "system", 
"__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.056) 0:02:47.287 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.038) 0:02:47.325 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.031) 0:02:47.357 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [ "quay.io/linux-system-roles/mysql:5.6" ], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.container", "__podman_volumes": [ "/tmp/quadlet_demo" ] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.077) 0:02:47.435 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.041) 0:02:47.477 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.129) 0:02:47.606 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 11 January 2025 11:34:00 -0500 (0:00:00.039) 0:02:47.645 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-11 11:32:33 EST", "ActiveEnterTimestampMonotonic": "474894959", "ActiveExitTimestamp": "Sat 2025-01-11 11:32:33 EST", "ActiveExitTimestampMonotonic": "475123094", "ActiveState": "failed", "After": "system.slice tmp.mount network-online.target sysinit.target basic.target 
quadlet-demo-mysql-volume.service systemd-journald.socket -.mount quadlet-demo-network.service", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-11 11:32:33 EST", "AssertTimestampMonotonic": "474649376", "Before": "multi-user.target shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "252952000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-11 11:32:33 EST", "ConditionTimestampMonotonic": "474649372", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "8182", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "yes", "DelegateControllers": "cpu cpuset io memory pids", "Description": "quadlet-demo-mysql.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "Environment": "PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-11 11:32:33 EST", "ExecMainExitTimestampMonotonic": "475122268", "ExecMainHandoffTimestampMonotonic": "0", "ExecMainPID": "35501", "ExecMainStartTimestamp": "Sat 2025-01-11 11:32:33 EST", "ExecMainStartTimestampMonotonic": "474850001", "ExecMainStatus": "1", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill quay.io/linux-system-roles/mysql:5.6 ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman run --name quadlet-demo-mysql --cidfile=/run/quadlet-demo-mysql.cid --replace --rm --cgroups=split --network systemd-quadlet-demo --sdnotify=conmon -d -v systemd-quadlet-demo-mysql:/var/lib/mysql -v /tmp/quadlet_demo:/var/lib/quadlet_demo:Z --secret mysql-root-password-container,type=env,target=MYSQL_ROOT_PASSWORD --health-cmd /bin/true --health-on-failure kill 
quay.io/linux-system-roles/mysql:5.6 ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStop": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPost": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; ignore_errors=yes ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStopPostEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman rm -v -f -i --cidfile=/run/quadlet-demo-mysql.cid ; flags=ignore-failure ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-mysql.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestamp": "Sat 2025-01-11 11:32:33 EST", "InactiveEnterTimestampMonotonic": "475165979", "InactiveExitTimestamp": "Sat 2025-01-11 11:32:33 EST", "InactiveExitTimestampMonotonic": "474650982", "InvocationID": "2313a055072a44c18cd532c711345fbd", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "mixed", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", 
"ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3209797632", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "32772096", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "all", "OOMPolicy": "continue", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "no", "RemoveIPC": "no", "Requires": "quadlet-demo-network.service system.slice sysinit.target quadlet-demo-mysql-volume.service -.mount", "RequiresMountsFor": "/run/containers /tmp/quadlet_demo", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "exit-code", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.container", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-11 11:32:33 EST", "StateChangeTimestampMonotonic": "475165979", "StateDirectoryMode": "0755", "StatusErrno": "0", 
"StopWhenUnneeded": "no", "SubState": "failed", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "1min 30s", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "notify", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "WantedBy": "multi-user.target", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 11 January 2025 11:34:01 -0500 (0:00:00.766) 0:02:48.412 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736613120.346505, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4", "ctime": 1736613120.349505, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 213909731, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736613120.080506, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 363, "uid": 0, "version": "37727778", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 11 January 2025 11:34:02 -0500 (0:00:00.387) 0:02:48.799 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 11 January 2025 11:34:02 -0500 (0:00:00.061) 0:02:48.860 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 11 January 2025 11:34:02 -0500 (0:00:00.369) 0:02:49.230 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 11 January 2025 11:34:02 -0500 (0:00:00.054) 0:02:49.284 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 11 January 2025 11:34:02 -0500 (0:00:00.038) 0:02:49.322 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 11 January 2025 11:34:02 -0500 (0:00:00.038) 0:02:49.361 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.container", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 11 January 2025 11:34:02 -0500 (0:00:00.381) 0:02:49.742 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 11 January 2025 11:34:03 -0500 (0:00:00.739) 0:02:50.482 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 11 January 2025 11:34:04 -0500 (0:00:00.433) 0:02:50.915 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 11 January 2025 11:34:04 -0500 (0:00:00.046) 0:02:50.962 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 11 January 2025 11:34:04 -0500 (0:00:00.034) 0:02:50.996 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.031335", "end": "2025-01-11 11:34:04.584156", "rc": 0, "start": "2025-01-11 11:34:04.552821" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 11 January 2025 11:34:04 -0500 (0:00:00.407) 0:02:51.404 ****** included: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:34:04 -0500 (0:00:00.111) 0:02:51.515 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:34:04 -0500 (0:00:00.032) 0:02:51.548 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:34:04 -0500 (0:00:00.033) 0:02:51.582 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 11 January 2025 11:34:04 -0500 (0:00:00.034) 0:02:51.616 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.032727", "end": "2025-01-11 11:34:05.215688", "rc": 0, "start": "2025-01-11 11:34:05.182961" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 11 January 2025 11:34:05 -0500 (0:00:00.418) 0:02:52.034 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.032697", "end": "2025-01-11 11:34:05.625724", "rc": 0, "start": "2025-01-11 11:34:05.593027" } STDOUT: local systemd-quadlet-demo-mysql TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 11 January 2025 11:34:05 -0500 (0:00:00.413) 0:02:52.448 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.030887", "end": "2025-01-11 11:34:06.037219", "rc": 0, "start": "2025-01-11 11:34:06.006332" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 11 January 2025 11:34:06 -0500 (0:00:00.410) 0:02:52.858 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031071", "end": "2025-01-11 11:34:06.448915", "rc": 0, "start": "2025-01-11 11:34:06.417844" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 11 January 2025 11:34:06 -0500 (0:00:00.411) 0:02:53.269 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 11 January 2025 11:34:06 -0500 (0:00:00.405) 0:02:53.675 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 11 January 2025 11:34:07 -0500 (0:00:00.416) 0:02:54.091 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": 
"dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, 
"man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql-volume.service": { "name": "quadlet-demo-mysql-volume.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" 
}, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, 
"systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": 
"inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", 
"state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", 
"state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:34:09 -0500 (0:00:01.932) 0:02:56.023 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.032) 0:02:56.056 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo-mysql.volume", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Volume]", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.045) 0:02:56.101 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.043) 0:02:56.144 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not 
__podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.034) 0:02:56.178 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo-mysql", "__podman_quadlet_type": "volume", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.104) 0:02:56.283 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.065) 0:02:56.349 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.038) 0:02:56.388 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.037) 0:02:56.425 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:34:09 -0500 (0:00:00.044) 0:02:56.470 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.380) 0:02:56.850 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.039) 0:02:56.889 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.037) 0:02:56.926 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.037) 0:02:56.964 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.037) 0:02:57.001 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.035) 0:02:57.036 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.035) 0:02:57.072 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.034) 0:02:57.107 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.036) 0:02:57.143 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-mysql-volume.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.058) 0:02:57.202 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.085) 0:02:57.288 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.033) 0:02:57.322 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo-mysql.volume", "__podman_volumes": [] }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.080) 0:02:57.403 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.040) 0:02:57.444 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.077) 0:02:57.521 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 11 January 2025 11:34:10 -0500 (0:00:00.035) 0:02:57.556 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-mysql-volume.service", "state": "stopped", "status": { 
"AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-11 11:31:50 EST", "ActiveEnterTimestampMonotonic": "431692975", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "sysinit.target network-online.target system.slice systemd-journald.socket basic.target -.mount", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-11 11:31:50 EST", "AssertTimestampMonotonic": "431633571", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "41483000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-11 11:31:50 EST", "ConditionTimestampMonotonic": "431633567", "ConfigurationDirectoryMode": "0755", "Conflicts": "shutdown.target", "ControlGroupId": "6344", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-mysql-volume.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-11 11:31:50 EST", "ExecMainExitTimestampMonotonic": "431692828", "ExecMainHandoffTimestamp": "Sat 2025-01-11 11:31:50 EST", "ExecMainHandoffTimestampMonotonic": "431644489", "ExecMainPID": "30783", "ExecMainStartTimestamp": "Sat 2025-01-11 11:31:50 EST", "ExecMainStartTimestampMonotonic": "431634378", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman volume create --ignore systemd-quadlet-demo-mysql ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": 
"/run/systemd/generator/quadlet-demo-mysql-volume.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-mysql-volume.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-11 11:31:50 EST", "InactiveExitTimestampMonotonic": "431635090", "InvocationID": "cd2b6230c86e4f8497c1c937b7130553", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", "LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3228188672", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "16502784", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-mysql-volume.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", 
"ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo-mysql.volume", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": "[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-11 11:31:50 EST", "StateChangeTimestampMonotonic": "431692975", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-mysql-volume", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 11 January 2025 11:34:11 -0500 (0:00:00.789) 0:02:58.346 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736613109.0625522, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "585f8cbdf0ec73000f9227dcffbef71e9552ea4a", "ctime": 1736613109.0655522, "dev": 51714, 
"device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 603980000, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736613108.7775533, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 9, "uid": 0, "version": "2939958891", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 11 January 2025 11:34:11 -0500 (0:00:00.381) 0:02:58.728 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 11 January 2025 11:34:12 -0500 (0:00:00.067) 0:02:58.795 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 11 January 2025 11:34:12 -0500 (0:00:00.364) 0:02:59.159 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 11 January 2025 11:34:12 -0500 (0:00:00.053) 0:02:59.213 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 11 January 2025 11:34:12 -0500 (0:00:00.035) 0:02:59.248 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 11 January 2025 11:34:12 -0500 (0:00:00.034) 0:02:59.283 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo-mysql.volume", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 11 January 2025 11:34:12 -0500 (0:00:00.381) 0:02:59.664 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task 
path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 11 January 2025 11:34:13 -0500 (0:00:00.730) 0:03:00.395 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 11 January 2025 11:34:14 -0500 (0:00:00.482) 0:03:00.878 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 11 January 2025 11:34:14 -0500 (0:00:00.048) 0:03:00.926 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 11 January 2025 11:34:14 -0500 (0:00:00.034) 0:03:00.961 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.032284", "end": "2025-01-11 11:34:14.551692", "rc": 0, "start": "2025-01-11 11:34:14.519408" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 11 January 2025 11:34:14 -0500 (0:00:00.410) 0:03:01.372 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:34:14 -0500 (0:00:00.059) 0:03:01.432 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:34:14 -0500 (0:00:00.031) 0:03:01.463 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:34:14 -0500 (0:00:00.031) 0:03:01.495 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 11 
January 2025 11:34:14 -0500 (0:00:00.031) 0:03:01.526 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031030", "end": "2025-01-11 11:34:15.116781", "rc": 0, "start": "2025-01-11 11:34:15.085751" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 11 January 2025 11:34:15 -0500 (0:00:00.410) 0:03:01.936 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.032642", "end": "2025-01-11 11:34:15.524657", "rc": 0, "start": "2025-01-11 11:34:15.492015" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 11 January 2025 11:34:15 -0500 (0:00:00.409) 0:03:02.346 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.029962", "end": "2025-01-11 11:34:15.933833", "rc": 0, "start": "2025-01-11 11:34:15.903871" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 11 January 2025 11:34:16 -0500 (0:00:00.407) 0:03:02.754 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.031771", "end": "2025-01-11 11:34:16.352612", "rc": 0, "start": "2025-01-11 11:34:16.320841" } STDOUT: podman podman-default-kube-network systemd-quadlet-demo TASK [fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 11 January 2025 11:34:16 -0500 (0:00:00.419) 0:03:03.173 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 11 January 2025 11:34:16 -0500 (0:00:00.407) 0:03:03.580 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 11 January 2025 11:34:17 -0500 (0:00:00.411) 0:03:03.992 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": 
"auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" 
}, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": 
{ "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quadlet-demo-network.service": { "name": "quadlet-demo-network.service", "source": "systemd", "state": "stopped", "status": "generated" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": 
"restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, 
"sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": "systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", 
"source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { "name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": 
"systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update 
quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:34:19 -0500 (0:00:02.037) 0:03:06.029 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 0] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:14 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.032) 0:03:06.062 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_file_src": "quadlet-demo.network", "__podman_quadlet_spec": {}, "__podman_quadlet_str": "[Network]\nSubnet=192.168.30.0/24\nGateway=192.168.30.1\nLabel=app=wordpress", "__podman_quadlet_template_src": "" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 1] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:25 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.048) 0:03:06.111 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_continue_if_pull_fails": false, "__podman_pull_image": true, "__podman_state": "absent", "__podman_systemd_unit_scope": "", "__podman_user": "root" }, "changed": false } TASK [fedora.linux_system_roles.podman : Fail if no quadlet spec is given] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:35 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.043) 0:03:06.154 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_quadlet_file_src", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 2] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:48 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.035) 0:03:06.189 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_name": "quadlet-demo", "__podman_quadlet_type": "network", "__podman_rootless": false }, "changed": false } TASK [fedora.linux_system_roles.podman : Check user and group information] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:57 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.050) 0:03:06.240 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Get user information] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:2 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.063) 0:03:06.303 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "'getent_passwd' not in ansible_facts or __podman_user not in ansible_facts['getent_passwd']", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user does not exist] ********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:9 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.042) 0:03:06.346 ****** skipping: [managed-node2] => { "changed": false, "false_condition": 
"not ansible_facts[\"getent_passwd\"][__podman_user]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set group for podman user] ************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:16 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.041) 0:03:06.387 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_group": "0" }, "changed": false } TASK [fedora.linux_system_roles.podman : See if getsubids exists] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:31 Saturday 11 January 2025 11:34:19 -0500 (0:00:00.048) 0:03:06.436 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736612945.6352425, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 32, "charset": "binary", "checksum": "89ab10a2a8fa81bcc0c1df0058f200469ce46f97", "ctime": 1736612940.9742577, "dev": 51714, "device_type": 0, "executable": true, "exists": true, "gid": 0, "gr_name": "root", "inode": 9160785, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "application/x-pie-executable", "mode": "0755", "mtime": 1730678400.0, "nlink": 1, "path": "/usr/bin/getsubids", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 15744, "uid": 0, "version": "1643853349", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": true, "xoth": true, "xusr": true } } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subuids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:42 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.382) 0:03:06.818 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Check with getsubids for user subgids] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:47 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.043) 0:03:06.862 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:52 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.035) 0:03:06.898 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_user not in [\"root\", \"0\"]", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subuid file] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:65 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.038) 0:03:06.936 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Get subgid file] ********************** task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:70 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.037) 0:03:06.973 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set user subuid and subgid info] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:75 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.037) 0:03:07.011 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subuid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:85 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.089) 0:03:07.101 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Fail if user not in subgid file] ****** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_user_group.yml:92 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.036) 0:03:07.137 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "not __podman_stat_getsubids.stat.exists", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 3] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:62 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.039) 0:03:07.177 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_activate_systemd_unit": true, "__podman_images_found": [], "__podman_kube_yamls_raw": "", "__podman_service_name": "quadlet-demo-network.service", "__podman_systemd_scope": "system", "__podman_user_home_dir": "/root", "__podman_xdg_runtime_dir": "/run/user/0" }, "changed": false } TASK [fedora.linux_system_roles.podman : Set per-container variables part 4] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:73 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.060) 0:03:07.237 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_path": "/etc/containers/systemd" }, "changed": false } TASK [fedora.linux_system_roles.podman : Get kube yaml contents] *************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:77 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.037) 0:03:07.275 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Set per-container variables part 5] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:87 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.032) 0:03:07.307 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_images": [], "__podman_quadlet_file": "/etc/containers/systemd/quadlet-demo.network", "__podman_volumes": [] }, "changed": false } TASK 
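For reference, the quadlet source file handled in this iteration can be read directly off the facts above: __podman_quadlet_file_src is quadlet-demo.network, __podman_quadlet_str carries its contents, and __podman_quadlet_file resolves to /etc/containers/systemd/quadlet-demo.network. A minimal sketch of that file, reconstructed from the logged __podman_quadlet_str and assuming the role wrote the string verbatim (this is a reconstruction from the facts, not additional captured output):

    # /etc/containers/systemd/quadlet-demo.network
    # systemd's quadlet generator renders this into quadlet-demo-network.service
    # under /run/systemd/generator (see FragmentPath/SourcePath in the unit
    # status dump below).
    [Network]
    Subnet=192.168.30.0/24
    Gateway=192.168.30.1
    Label=app=wordpress
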
[fedora.linux_system_roles.podman : Set per-container variables part 6] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:105 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.080) 0:03:07.388 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Cleanup quadlets] ********************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:112 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.042) 0:03:07.431 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Stat XDG_RUNTIME_DIR] ***************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:4 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.078) 0:03:07.509 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Stop and disable service] ************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:12 Saturday 11 January 2025 11:34:20 -0500 (0:00:00.035) 0:03:07.545 ****** changed: [managed-node2] => { "changed": true, "enabled": false, "failed_when_result": false, "name": "quadlet-demo-network.service", "state": "stopped", "status": { "AccessSELinuxContext": "system_u:object_r:systemd_unit_file_t:s0", "ActiveEnterTimestamp": "Sat 2025-01-11 11:31:46 EST", "ActiveEnterTimestampMonotonic": "427332779", "ActiveExitTimestampMonotonic": "0", "ActiveState": "active", "After": "network-online.target -.mount system.slice systemd-journald.socket sysinit.target basic.target", "AllowIsolate": "no", "AssertResult": "yes", "AssertTimestamp": "Sat 2025-01-11 11:31:46 EST", "AssertTimestampMonotonic": "427283133", "Before": "shutdown.target", "BindLogSockets": "no", "BlockIOAccounting": "no", "BlockIOWeight": "[not set]", "CPUAccounting": "yes", "CPUAffinityFromNUMA": "no", "CPUQuotaPerSecUSec": "infinity", "CPUQuotaPeriodUSec": "infinity", "CPUSchedulingPolicy": "0", "CPUSchedulingPriority": "0", "CPUSchedulingResetOnFork": "no", "CPUShares": "[not set]", "CPUUsageNSec": "38360000", "CPUWeight": "[not set]", "CacheDirectoryMode": "0755", "CanFreeze": "yes", "CanIsolate": "no", "CanLiveMount": "no", "CanReload": "no", "CanStart": "yes", "CanStop": "yes", "CapabilityBoundingSet": "cap_chown cap_dac_override cap_dac_read_search cap_fowner cap_fsetid cap_kill cap_setgid cap_setuid cap_setpcap cap_linux_immutable cap_net_bind_service cap_net_broadcast cap_net_admin cap_net_raw cap_ipc_lock cap_ipc_owner cap_sys_module cap_sys_rawio cap_sys_chroot cap_sys_ptrace cap_sys_pacct cap_sys_admin cap_sys_boot cap_sys_nice cap_sys_resource cap_sys_time cap_sys_tty_config cap_mknod cap_lease cap_audit_write cap_audit_control cap_setfcap cap_mac_override cap_mac_admin cap_syslog cap_wake_alarm cap_block_suspend cap_audit_read cap_perfmon cap_bpf cap_checkpoint_restore", "CleanResult": "success", "CollectMode": "inactive", "ConditionResult": "yes", "ConditionTimestamp": "Sat 2025-01-11 11:31:46 EST", "ConditionTimestampMonotonic": "427283130", "ConfigurationDirectoryMode": 
"0755", "Conflicts": "shutdown.target", "ControlGroupId": "6305", "ControlPID": "0", "CoredumpFilter": "0x33", "CoredumpReceive": "no", "DebugInvocation": "no", "DefaultDependencies": "yes", "DefaultMemoryLow": "0", "DefaultMemoryMin": "0", "DefaultStartupMemoryLow": "0", "Delegate": "no", "Description": "quadlet-demo-network.service", "DevicePolicy": "auto", "DynamicUser": "no", "EffectiveMemoryHigh": "3698229248", "EffectiveMemoryMax": "3698229248", "EffectiveTasksMax": "22365", "ExecMainCode": "1", "ExecMainExitTimestamp": "Sat 2025-01-11 11:31:46 EST", "ExecMainExitTimestampMonotonic": "427332571", "ExecMainHandoffTimestamp": "Sat 2025-01-11 11:31:46 EST", "ExecMainHandoffTimestampMonotonic": "427296497", "ExecMainPID": "29953", "ExecMainStartTimestamp": "Sat 2025-01-11 11:31:46 EST", "ExecMainStartTimestampMonotonic": "427283913", "ExecMainStatus": "0", "ExecStart": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExecStartEx": "{ path=/usr/bin/podman ; argv[]=/usr/bin/podman network create --ignore --subnet 192.168.30.0/24 --gateway 192.168.30.1 --label app=wordpress systemd-quadlet-demo ; flags= ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }", "ExitType": "main", "ExtensionImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "FailureAction": "none", "FileDescriptorStoreMax": "0", "FileDescriptorStorePreserve": "restart", "FinalKillSignal": "9", "FragmentPath": "/run/systemd/generator/quadlet-demo-network.service", "FreezerState": "running", "GID": "[not set]", "GuessMainPID": "yes", "IOAccounting": "no", "IOReadBytes": "[not set]", "IOReadOperations": "[not set]", "IOSchedulingClass": "2", "IOSchedulingPriority": "4", "IOWeight": "[not set]", "IOWriteBytes": "[not set]", "IOWriteOperations": "[not set]", "IPAccounting": "no", "IPEgressBytes": "[no data]", "IPEgressPackets": "[no data]", "IPIngressBytes": "[no data]", "IPIngressPackets": "[no data]", "Id": "quadlet-demo-network.service", "IgnoreOnIsolate": "no", "IgnoreSIGPIPE": "yes", "InactiveEnterTimestampMonotonic": "0", "InactiveExitTimestamp": "Sat 2025-01-11 11:31:46 EST", "InactiveExitTimestampMonotonic": "427284847", "InvocationID": "c3a85eb142824ab399fbef64fed99c2b", "JobRunningTimeoutUSec": "infinity", "JobTimeoutAction": "none", "JobTimeoutUSec": "infinity", "KeyringMode": "private", "KillMode": "control-group", "KillSignal": "15", "LimitAS": "infinity", "LimitASSoft": "infinity", "LimitCORE": "infinity", "LimitCORESoft": "infinity", "LimitCPU": "infinity", "LimitCPUSoft": "infinity", "LimitDATA": "infinity", "LimitDATASoft": "infinity", "LimitFSIZE": "infinity", "LimitFSIZESoft": "infinity", "LimitLOCKS": "infinity", "LimitLOCKSSoft": "infinity", "LimitMEMLOCK": "8388608", "LimitMEMLOCKSoft": "8388608", "LimitMSGQUEUE": "819200", "LimitMSGQUEUESoft": "819200", "LimitNICE": "0", "LimitNICESoft": "0", "LimitNOFILE": "524288", "LimitNOFILESoft": "1024", "LimitNPROC": "13978", "LimitNPROCSoft": "13978", "LimitRSS": "infinity", "LimitRSSSoft": "infinity", "LimitRTPRIO": "0", "LimitRTPRIOSoft": "0", "LimitRTTIME": "infinity", "LimitRTTIMESoft": "infinity", "LimitSIGPENDING": "13978", 
"LimitSIGPENDINGSoft": "13978", "LimitSTACK": "infinity", "LimitSTACKSoft": "8388608", "LiveMountResult": "success", "LoadState": "loaded", "LockPersonality": "no", "LogLevelMax": "-1", "LogRateLimitBurst": "0", "LogRateLimitIntervalUSec": "0", "LogsDirectoryMode": "0755", "MainPID": "0", "ManagedOOMMemoryPressure": "auto", "ManagedOOMMemoryPressureDurationUSec": "[not set]", "ManagedOOMMemoryPressureLimit": "0", "ManagedOOMPreference": "none", "ManagedOOMSwap": "auto", "MemoryAccounting": "yes", "MemoryAvailable": "3232391168", "MemoryCurrent": "[not set]", "MemoryDenyWriteExecute": "no", "MemoryHigh": "infinity", "MemoryKSM": "no", "MemoryLimit": "infinity", "MemoryLow": "0", "MemoryMax": "infinity", "MemoryMin": "0", "MemoryPeak": "16506880", "MemoryPressureThresholdUSec": "200ms", "MemoryPressureWatch": "auto", "MemorySwapCurrent": "[not set]", "MemorySwapMax": "infinity", "MemorySwapPeak": "0", "MemoryZSwapCurrent": "[not set]", "MemoryZSwapMax": "infinity", "MemoryZSwapWriteback": "yes", "MountAPIVFS": "no", "MountImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "NFileDescriptorStore": "0", "NRestarts": "0", "NUMAPolicy": "n/a", "Names": "quadlet-demo-network.service", "NeedDaemonReload": "no", "Nice": "0", "NoNewPrivileges": "no", "NonBlocking": "no", "NotifyAccess": "none", "OOMPolicy": "stop", "OOMScoreAdjust": "0", "OnFailureJobMode": "replace", "OnSuccessJobMode": "fail", "Perpetual": "no", "PrivateDevices": "no", "PrivateIPC": "no", "PrivateMounts": "no", "PrivateNetwork": "no", "PrivatePIDs": "no", "PrivateTmp": "no", "PrivateTmpEx": "no", "PrivateUsers": "no", "PrivateUsersEx": "no", "ProcSubset": "all", "ProtectClock": "no", "ProtectControlGroups": "no", "ProtectControlGroupsEx": "no", "ProtectHome": "no", "ProtectHostname": "no", "ProtectKernelLogs": "no", "ProtectKernelModules": "no", "ProtectKernelTunables": "no", "ProtectProc": "default", "ProtectSystem": "no", "RefuseManualStart": "no", "RefuseManualStop": "no", "ReloadResult": "success", "ReloadSignal": "1", "RemainAfterExit": "yes", "RemoveIPC": "no", "Requires": "sysinit.target -.mount system.slice", "RequiresMountsFor": "/run/containers", "Restart": "no", "RestartKillSignal": "15", "RestartMaxDelayUSec": "infinity", "RestartMode": "normal", "RestartSteps": "0", "RestartUSec": "100ms", "RestartUSecNext": "100ms", "RestrictNamespaces": "no", "RestrictRealtime": "no", "RestrictSUIDSGID": "no", "Result": "success", "RootDirectoryStartOnly": "no", "RootEphemeral": "no", "RootImagePolicy": "root=verity+signed+encrypted+unprotected+absent:usr=verity+signed+encrypted+unprotected+absent:home=encrypted+unprotected+absent:srv=encrypted+unprotected+absent:tmp=encrypted+unprotected+absent:var=encrypted+unprotected+absent", "RuntimeDirectoryMode": "0755", "RuntimeDirectoryPreserve": "no", "RuntimeMaxUSec": "infinity", "RuntimeRandomizedExtraUSec": "0", "SameProcessGroup": "no", "SecureBits": "0", "SendSIGHUP": "no", "SendSIGKILL": "yes", "SetLoginEnvironment": "no", "Slice": "system.slice", "SourcePath": "/etc/containers/systemd/quadlet-demo.network", "StandardError": "inherit", "StandardInput": "null", "StandardOutput": "journal", "StartLimitAction": "none", "StartLimitBurst": "5", "StartLimitIntervalUSec": "10s", "StartupBlockIOWeight": "[not set]", "StartupCPUShares": "[not set]", "StartupCPUWeight": "[not set]", "StartupIOWeight": 
"[not set]", "StartupMemoryHigh": "infinity", "StartupMemoryLow": "0", "StartupMemoryMax": "infinity", "StartupMemorySwapMax": "infinity", "StartupMemoryZSwapMax": "infinity", "StateChangeTimestamp": "Sat 2025-01-11 11:31:46 EST", "StateChangeTimestampMonotonic": "427332779", "StateDirectoryMode": "0755", "StatusErrno": "0", "StopWhenUnneeded": "no", "SubState": "exited", "SuccessAction": "none", "SurviveFinalKillSignal": "no", "SyslogFacility": "3", "SyslogIdentifier": "quadlet-demo-network", "SyslogLevel": "6", "SyslogLevelPrefix": "yes", "SyslogPriority": "30", "SystemCallErrorNumber": "2147483646", "TTYReset": "no", "TTYVHangup": "no", "TTYVTDisallocate": "no", "TasksAccounting": "yes", "TasksCurrent": "[not set]", "TasksMax": "22365", "TimeoutAbortUSec": "1min 30s", "TimeoutCleanUSec": "infinity", "TimeoutStartFailureMode": "terminate", "TimeoutStartUSec": "infinity", "TimeoutStopFailureMode": "terminate", "TimeoutStopUSec": "1min 30s", "TimerSlackNSec": "50000", "Transient": "no", "Type": "oneshot", "UID": "[not set]", "UMask": "0022", "UnitFilePreset": "disabled", "UnitFileState": "generated", "UtmpMode": "init", "Wants": "network-online.target", "WatchdogSignal": "6", "WatchdogTimestampMonotonic": "0", "WatchdogUSec": "0" } } TASK [fedora.linux_system_roles.podman : See if quadlet file exists] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:33 Saturday 11 January 2025 11:34:21 -0500 (0:00:00.795) 0:03:08.340 ****** ok: [managed-node2] => { "changed": false, "stat": { "atime": 1736613104.584571, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "e57c08d49aff4bae8daab138d913aeddaa8682a0", "ctime": 1736613104.587571, "dev": 51714, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 557842650, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1736613104.1585727, "nlink": 1, "path": "/etc/containers/systemd/quadlet-demo.network", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 74, "uid": 0, "version": "2985817482", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false } } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:38 Saturday 11 January 2025 11:34:21 -0500 (0:00:00.382) 0:03:08.723 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Slurp quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:6 Saturday 11 January 2025 11:34:22 -0500 (0:00:00.061) 0:03:08.784 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet file] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:12 Saturday 11 January 2025 11:34:22 -0500 (0:00:00.389) 0:03:09.174 ****** ok: 
[managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Parse quadlet yaml file] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:44 Saturday 11 January 2025 11:34:22 -0500 (0:00:00.091) 0:03:09.266 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Reset raw variable] ******************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/parse_quadlet_file.yml:52 Saturday 11 January 2025 11:34:22 -0500 (0:00:00.098) 0:03:09.364 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_raw": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Remove quadlet file] ****************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:42 Saturday 11 January 2025 11:34:22 -0500 (0:00:00.039) 0:03:09.404 ****** changed: [managed-node2] => { "changed": true, "path": "/etc/containers/systemd/quadlet-demo.network", "state": "absent" } TASK [fedora.linux_system_roles.podman : Refresh systemd] ********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:48 Saturday 11 January 2025 11:34:23 -0500 (0:00:00.381) 0:03:09.785 ****** ok: [managed-node2] => { "changed": false, "name": null, "status": {} } TASK [fedora.linux_system_roles.podman : Remove managed resource] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:58 Saturday 11 January 2025 11:34:23 -0500 (0:00:00.731) 0:03:10.517 ****** changed: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": true } TASK [fedora.linux_system_roles.podman : Remove volumes] *********************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99 Saturday 11 January 2025 11:34:24 -0500 (0:00:00.424) 0:03:10.941 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Clear parsed podman variable] ********* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:116 Saturday 11 January 2025 11:34:24 -0500 (0:00:00.045) 0:03:10.987 ****** ok: [managed-node2] => { "ansible_facts": { "__podman_quadlet_parsed": null }, "changed": false } TASK [fedora.linux_system_roles.podman : Prune images no longer in use] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120 Saturday 11 January 2025 11:34:24 -0500 (0:00:00.033) 0:03:11.021 ****** changed: [managed-node2] => { "changed": true, "cmd": [ "podman", "image", "prune", "--all", "-f" ], "delta": "0:00:00.033058", "end": "2025-01-11 11:34:24.611535", "rc": 0, "start": "2025-01-11 11:34:24.578477" } TASK [fedora.linux_system_roles.podman : Manage linger] ************************ task path: 
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:131 Saturday 11 January 2025 11:34:24 -0500 (0:00:00.410) 0:03:11.431 ****** included: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml for managed-node2 TASK [fedora.linux_system_roles.podman : Enable linger if needed] ************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:12 Saturday 11 January 2025 11:34:24 -0500 (0:00:00.059) 0:03:11.490 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user as not yet needing to cancel linger] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:18 Saturday 11 January 2025 11:34:24 -0500 (0:00:00.033) 0:03:11.524 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Mark user for possible linger cancel] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/manage_linger.yml:22 Saturday 11 January 2025 11:34:24 -0500 (0:00:00.032) 0:03:11.557 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_rootless | bool", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : For testing and debugging - images] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:141 Saturday 11 January 2025 11:34:24 -0500 (0:00:00.034) 0:03:11.592 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "images", "-n" ], "delta": "0:00:00.031985", "end": "2025-01-11 11:34:25.185402", "rc": 0, "start": "2025-01-11 11:34:25.153417" } TASK [fedora.linux_system_roles.podman : For testing and debugging - volumes] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:150 Saturday 11 January 2025 11:34:25 -0500 (0:00:00.415) 0:03:12.007 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "volume", "ls", "-n" ], "delta": "0:00:00.032403", "end": "2025-01-11 11:34:25.599279", "rc": 0, "start": "2025-01-11 11:34:25.566876" } TASK [fedora.linux_system_roles.podman : For testing and debugging - containers] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:159 Saturday 11 January 2025 11:34:25 -0500 (0:00:00.411) 0:03:12.418 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "ps", "--noheading" ], "delta": "0:00:00.030624", "end": "2025-01-11 11:34:26.007793", "rc": 0, "start": "2025-01-11 11:34:25.977169" } TASK [fedora.linux_system_roles.podman : For testing and debugging - networks] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:168 Saturday 11 January 2025 11:34:26 -0500 (0:00:00.409) 0:03:12.828 ****** ok: [managed-node2] => { "changed": false, "cmd": [ "podman", "network", "ls", "-n", "-q" ], "delta": "0:00:00.032006", "end": "2025-01-11 11:34:26.422829", "rc": 0, "start": "2025-01-11 11:34:26.390823" } STDOUT: podman podman-default-kube-network TASK 
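The "Remove quadlet file" / "Refresh systemd" / "Remove managed resource" sequence above, together with this network listing, shows the cleanup taking effect: quadlet-demo-network.service was a generated unit (FragmentPath /run/systemd/generator/quadlet-demo-network.service, SourcePath /etc/containers/systemd/quadlet-demo.network in the status dump earlier), and the podman network ls -n -q output now lists only podman and podman-default-kube-network, i.e. the systemd-quadlet-demo network created by the unit's ExecStart is gone. A rough manual equivalent on the managed node, assuming root access and assuming the "Refresh systemd" task amounts to a plain daemon reload:

    rm /etc/containers/systemd/quadlet-demo.network
    systemctl daemon-reload                  # the generated unit disappears with its source file
    podman network rm systemd-quadlet-demo   # likely what "Remove managed resource" amounts to for a .network quadlet (its output is hidden by no_log above)
    podman network ls -n -q                  # systemd-quadlet-demo should no longer be listed
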
[fedora.linux_system_roles.podman : For testing and debugging - secrets] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:177 Saturday 11 January 2025 11:34:26 -0500 (0:00:00.473) 0:03:13.302 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - pods] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:187 Saturday 11 January 2025 11:34:26 -0500 (0:00:00.410) 0:03:13.712 ****** ok: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : For testing and debugging - services] *** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197 Saturday 11 January 2025 11:34:27 -0500 (0:00:00.414) 0:03:14.126 ****** ok: [managed-node2] => { "ansible_facts": { "services": { "NetworkManager-dispatcher.service": { "name": "NetworkManager-dispatcher.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "NetworkManager-wait-online.service": { "name": "NetworkManager-wait-online.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "NetworkManager.service": { "name": "NetworkManager.service", "source": "systemd", "state": "running", "status": "enabled" }, "audit-rules.service": { "name": "audit-rules.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "auditd.service": { "name": "auditd.service", "source": "systemd", "state": "running", "status": "enabled" }, "auth-rpcgss-module.service": { "name": "auth-rpcgss-module.service", "source": "systemd", "state": "stopped", "status": "static" }, "autofs.service": { "name": "autofs.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "autovt@.service": { "name": "autovt@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "blk-availability.service": { "name": "blk-availability.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "capsule@.service": { "name": "capsule@.service", "source": "systemd", "state": "unknown", "status": "static" }, "certmonger.service": { "name": "certmonger.service", "source": "systemd", "state": "running", "status": "enabled" }, "chrony-wait.service": { "name": "chrony-wait.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd-restricted.service": { "name": "chronyd-restricted.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "chronyd.service": { "name": "chronyd.service", "source": "systemd", "state": "running", "status": "enabled" }, "cloud-config.service": { "name": "cloud-config.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-final.service": { "name": "cloud-final.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init-hotplugd.service": { "name": "cloud-init-hotplugd.service", "source": "systemd", "state": "inactive", "status": "static" }, "cloud-init-local.service": { "name": "cloud-init-local.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "cloud-init.service": { "name": "cloud-init.service", "source": "systemd", "state": "stopped", 
"status": "enabled" }, "console-getty.service": { "name": "console-getty.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "container-getty@.service": { "name": "container-getty@.service", "source": "systemd", "state": "unknown", "status": "static" }, "crond.service": { "name": "crond.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-broker.service": { "name": "dbus-broker.service", "source": "systemd", "state": "running", "status": "enabled" }, "dbus-org.fedoraproject.FirewallD1.service": { "name": "dbus-org.fedoraproject.FirewallD1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.hostname1.service": { "name": "dbus-org.freedesktop.hostname1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.locale1.service": { "name": "dbus-org.freedesktop.locale1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.login1.service": { "name": "dbus-org.freedesktop.login1.service", "source": "systemd", "state": "active", "status": "alias" }, "dbus-org.freedesktop.nm-dispatcher.service": { "name": "dbus-org.freedesktop.nm-dispatcher.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus-org.freedesktop.timedate1.service": { "name": "dbus-org.freedesktop.timedate1.service", "source": "systemd", "state": "inactive", "status": "alias" }, "dbus.service": { "name": "dbus.service", "source": "systemd", "state": "active", "status": "alias" }, "debug-shell.service": { "name": "debug-shell.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd.service": { "name": "dhcpcd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dhcpcd@.service": { "name": "dhcpcd@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "display-manager.service": { "name": "display-manager.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "dm-event.service": { "name": "dm-event.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-makecache.service": { "name": "dnf-makecache.service", "source": "systemd", "state": "stopped", "status": "static" }, "dnf-system-upgrade-cleanup.service": { "name": "dnf-system-upgrade-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "dnf-system-upgrade.service": { "name": "dnf-system-upgrade.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "dracut-cmdline.service": { "name": "dracut-cmdline.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-initqueue.service": { "name": "dracut-initqueue.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-mount.service": { "name": "dracut-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-mount.service": { "name": "dracut-pre-mount.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-pivot.service": { "name": "dracut-pre-pivot.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-trigger.service": { "name": "dracut-pre-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-pre-udev.service": { "name": "dracut-pre-udev.service", "source": "systemd", "state": "stopped", "status": "static" }, "dracut-shutdown-onfailure.service": { "name": "dracut-shutdown-onfailure.service", "source": "systemd", 
"state": "stopped", "status": "static" }, "dracut-shutdown.service": { "name": "dracut-shutdown.service", "source": "systemd", "state": "stopped", "status": "static" }, "ebtables.service": { "name": "ebtables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "emergency.service": { "name": "emergency.service", "source": "systemd", "state": "stopped", "status": "static" }, "fips-crypto-policy-overlay.service": { "name": "fips-crypto-policy-overlay.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "firewalld.service": { "name": "firewalld.service", "source": "systemd", "state": "running", "status": "enabled" }, "fsidd.service": { "name": "fsidd.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "fstrim.service": { "name": "fstrim.service", "source": "systemd", "state": "stopped", "status": "static" }, "getty@.service": { "name": "getty@.service", "source": "systemd", "state": "unknown", "status": "enabled" }, "getty@tty1.service": { "name": "getty@tty1.service", "source": "systemd", "state": "running", "status": "active" }, "grub-boot-indeterminate.service": { "name": "grub-boot-indeterminate.service", "source": "systemd", "state": "inactive", "status": "static" }, "grub2-systemd-integration.service": { "name": "grub2-systemd-integration.service", "source": "systemd", "state": "inactive", "status": "static" }, "gssproxy.service": { "name": "gssproxy.service", "source": "systemd", "state": "running", "status": "disabled" }, "hv_kvp_daemon.service": { "name": "hv_kvp_daemon.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "initrd-cleanup.service": { "name": "initrd-cleanup.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-parse-etc.service": { "name": "initrd-parse-etc.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-switch-root.service": { "name": "initrd-switch-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "initrd-udevadm-cleanup-db.service": { "name": "initrd-udevadm-cleanup-db.service", "source": "systemd", "state": "stopped", "status": "static" }, "ip6tables.service": { "name": "ip6tables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ipset.service": { "name": "ipset.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "iptables.service": { "name": "iptables.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "irqbalance.service": { "name": "irqbalance.service", "source": "systemd", "state": "running", "status": "enabled" }, "kdump.service": { "name": "kdump.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "kmod-static-nodes.service": { "name": "kmod-static-nodes.service", "source": "systemd", "state": "stopped", "status": "static" }, "kvm_stat.service": { "name": "kvm_stat.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "ldconfig.service": { "name": "ldconfig.service", "source": "systemd", "state": "stopped", "status": "static" }, "logrotate.service": { "name": "logrotate.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm-devices-import.service": { "name": "lvm-devices-import.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "lvm2-lvmpolld.service": { "name": "lvm2-lvmpolld.service", "source": "systemd", "state": "stopped", "status": "static" }, "lvm2-monitor.service": { "name": "lvm2-monitor.service", "source": 
"systemd", "state": "stopped", "status": "enabled" }, "man-db-cache-update.service": { "name": "man-db-cache-update.service", "source": "systemd", "state": "inactive", "status": "static" }, "man-db-restart-cache-update.service": { "name": "man-db-restart-cache-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "microcode.service": { "name": "microcode.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "modprobe@.service": { "name": "modprobe@.service", "source": "systemd", "state": "unknown", "status": "static" }, "modprobe@configfs.service": { "name": "modprobe@configfs.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@dm_mod.service": { "name": "modprobe@dm_mod.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@drm.service": { "name": "modprobe@drm.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@efi_pstore.service": { "name": "modprobe@efi_pstore.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@fuse.service": { "name": "modprobe@fuse.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "modprobe@loop.service": { "name": "modprobe@loop.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "netavark-dhcp-proxy.service": { "name": "netavark-dhcp-proxy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "netavark-firewalld-reload.service": { "name": "netavark-firewalld-reload.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "network.service": { "name": "network.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "nfs-blkmap.service": { "name": "nfs-blkmap.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nfs-idmapd.service": { "name": "nfs-idmapd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-mountd.service": { "name": "nfs-mountd.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfs-server.service": { "name": "nfs-server.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "nfs-utils.service": { "name": "nfs-utils.service", "source": "systemd", "state": "stopped", "status": "static" }, "nfsdcld.service": { "name": "nfsdcld.service", "source": "systemd", "state": "stopped", "status": "static" }, "nftables.service": { "name": "nftables.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nis-domainname.service": { "name": "nis-domainname.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "nm-priv-helper.service": { "name": "nm-priv-helper.service", "source": "systemd", "state": "inactive", "status": "static" }, "ntpd.service": { "name": "ntpd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "ntpdate.service": { "name": "ntpdate.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "pam_namespace.service": { "name": "pam_namespace.service", "source": "systemd", "state": "inactive", "status": "static" }, "pcscd.service": { "name": "pcscd.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "plymouth-quit-wait.service": { "name": "plymouth-quit-wait.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "plymouth-start.service": { "name": "plymouth-start.service", "source": "systemd", "state": "stopped", "status": "not-found" }, 
"podman-auto-update.service": { "name": "podman-auto-update.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-clean-transient.service": { "name": "podman-clean-transient.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman-kube@.service": { "name": "podman-kube@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "podman-restart.service": { "name": "podman-restart.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "podman.service": { "name": "podman.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "polkit.service": { "name": "polkit.service", "source": "systemd", "state": "running", "status": "static" }, "qemu-guest-agent.service": { "name": "qemu-guest-agent.service", "source": "systemd", "state": "inactive", "status": "enabled" }, "quadlet-demo-mysql.service": { "name": "quadlet-demo-mysql.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quadlet-demo.service": { "name": "quadlet-demo.service", "source": "systemd", "state": "stopped", "status": "failed" }, "quotaon-root.service": { "name": "quotaon-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "quotaon@.service": { "name": "quotaon@.service", "source": "systemd", "state": "unknown", "status": "static" }, "rc-local.service": { "name": "rc-local.service", "source": "systemd", "state": "stopped", "status": "static" }, "rescue.service": { "name": "rescue.service", "source": "systemd", "state": "stopped", "status": "static" }, "restraintd.service": { "name": "restraintd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rngd.service": { "name": "rngd.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpc-gssd.service": { "name": "rpc-gssd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd-notify.service": { "name": "rpc-statd-notify.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-statd.service": { "name": "rpc-statd.service", "source": "systemd", "state": "stopped", "status": "static" }, "rpc-svcgssd.service": { "name": "rpc-svcgssd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "rpcbind.service": { "name": "rpcbind.service", "source": "systemd", "state": "running", "status": "enabled" }, "rpmdb-migrate.service": { "name": "rpmdb-migrate.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rpmdb-rebuild.service": { "name": "rpmdb-rebuild.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "rsyslog.service": { "name": "rsyslog.service", "source": "systemd", "state": "running", "status": "enabled" }, "selinux-autorelabel-mark.service": { "name": "selinux-autorelabel-mark.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "selinux-autorelabel.service": { "name": "selinux-autorelabel.service", "source": "systemd", "state": "inactive", "status": "static" }, "selinux-check-proper-disable.service": { "name": "selinux-check-proper-disable.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "serial-getty@.service": { "name": "serial-getty@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "serial-getty@ttyS0.service": { "name": "serial-getty@ttyS0.service", "source": "systemd", "state": "running", "status": "active" }, "sntp.service": { "name": "sntp.service", "source": "systemd", "state": "stopped", 
"status": "not-found" }, "ssh-host-keys-migration.service": { "name": "ssh-host-keys-migration.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "sshd-keygen.service": { "name": "sshd-keygen.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "sshd-keygen@.service": { "name": "sshd-keygen@.service", "source": "systemd", "state": "unknown", "status": "disabled" }, "sshd-keygen@ecdsa.service": { "name": "sshd-keygen@ecdsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@ed25519.service": { "name": "sshd-keygen@ed25519.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-keygen@rsa.service": { "name": "sshd-keygen@rsa.service", "source": "systemd", "state": "stopped", "status": "inactive" }, "sshd-unix-local@.service": { "name": "sshd-unix-local@.service", "source": "systemd", "state": "unknown", "status": "alias" }, "sshd.service": { "name": "sshd.service", "source": "systemd", "state": "running", "status": "enabled" }, "sshd@.service": { "name": "sshd@.service", "source": "systemd", "state": "unknown", "status": "indirect" }, "sssd-autofs.service": { "name": "sssd-autofs.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-kcm.service": { "name": "sssd-kcm.service", "source": "systemd", "state": "stopped", "status": "indirect" }, "sssd-nss.service": { "name": "sssd-nss.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pac.service": { "name": "sssd-pac.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-pam.service": { "name": "sssd-pam.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-ssh.service": { "name": "sssd-ssh.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd-sudo.service": { "name": "sssd-sudo.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "sssd.service": { "name": "sssd.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "syslog.service": { "name": "syslog.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "system-update-cleanup.service": { "name": "system-update-cleanup.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-ask-password-console.service": { "name": "systemd-ask-password-console.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-ask-password-wall.service": { "name": "systemd-ask-password-wall.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-backlight@.service": { "name": "systemd-backlight@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-battery-check.service": { "name": "systemd-battery-check.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-binfmt.service": { "name": "systemd-binfmt.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-bless-boot.service": { "name": "systemd-bless-boot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-boot-check-no-failures.service": { "name": "systemd-boot-check-no-failures.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-boot-random-seed.service": { "name": "systemd-boot-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-boot-update.service": { "name": "systemd-boot-update.service", "source": 
"systemd", "state": "inactive", "status": "disabled" }, "systemd-bootctl@.service": { "name": "systemd-bootctl@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-confext.service": { "name": "systemd-confext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-coredump@.service": { "name": "systemd-coredump@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-creds@.service": { "name": "systemd-creds@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-exit.service": { "name": "systemd-exit.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-firstboot.service": { "name": "systemd-firstboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck-root.service": { "name": "systemd-fsck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-fsck@.service": { "name": "systemd-fsck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-growfs-root.service": { "name": "systemd-growfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-growfs@.service": { "name": "systemd-growfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-halt.service": { "name": "systemd-halt.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hibernate-clear.service": { "name": "systemd-hibernate-clear.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate-resume.service": { "name": "systemd-hibernate-resume.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hibernate.service": { "name": "systemd-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-hostnamed.service": { "name": "systemd-hostnamed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hwdb-update.service": { "name": "systemd-hwdb-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-hybrid-sleep.service": { "name": "systemd-hybrid-sleep.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-initctl.service": { "name": "systemd-initctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-catalog-update.service": { "name": "systemd-journal-catalog-update.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journal-flush.service": { "name": "systemd-journal-flush.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-journald-sync@.service": { "name": "systemd-journald-sync@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-journald.service": { "name": "systemd-journald.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-journald@.service": { "name": "systemd-journald@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-kexec.service": { "name": "systemd-kexec.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-localed.service": { "name": "systemd-localed.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-logind.service": { "name": "systemd-logind.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-machine-id-commit.service": { "name": 
"systemd-machine-id-commit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-modules-load.service": { "name": "systemd-modules-load.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-network-generator.service": { "name": "systemd-network-generator.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-networkd-wait-online.service": { "name": "systemd-networkd-wait-online.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-oomd.service": { "name": "systemd-oomd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-pcrextend@.service": { "name": "systemd-pcrextend@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrfs-root.service": { "name": "systemd-pcrfs-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pcrfs@.service": { "name": "systemd-pcrfs@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrlock-file-system.service": { "name": "systemd-pcrlock-file-system.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-code.service": { "name": "systemd-pcrlock-firmware-code.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-firmware-config.service": { "name": "systemd-pcrlock-firmware-config.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-machine-id.service": { "name": "systemd-pcrlock-machine-id.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-make-policy.service": { "name": "systemd-pcrlock-make-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-authority.service": { "name": "systemd-pcrlock-secureboot-authority.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock-secureboot-policy.service": { "name": "systemd-pcrlock-secureboot-policy.service", "source": "systemd", "state": "inactive", "status": "disabled" }, "systemd-pcrlock@.service": { "name": "systemd-pcrlock@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-pcrmachine.service": { "name": "systemd-pcrmachine.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-initrd.service": { "name": "systemd-pcrphase-initrd.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase-sysinit.service": { "name": "systemd-pcrphase-sysinit.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-pcrphase.service": { "name": "systemd-pcrphase.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-poweroff.service": { "name": "systemd-poweroff.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-pstore.service": { "name": "systemd-pstore.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-quotacheck-root.service": { "name": "systemd-quotacheck-root.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-quotacheck@.service": { "name": "systemd-quotacheck@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-random-seed.service": { "name": "systemd-random-seed.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-reboot.service": { 
"name": "systemd-reboot.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-remount-fs.service": { "name": "systemd-remount-fs.service", "source": "systemd", "state": "stopped", "status": "enabled-runtime" }, "systemd-repart.service": { "name": "systemd-repart.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-rfkill.service": { "name": "systemd-rfkill.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-soft-reboot.service": { "name": "systemd-soft-reboot.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-suspend-then-hibernate.service": { "name": "systemd-suspend-then-hibernate.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-suspend.service": { "name": "systemd-suspend.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-sysctl.service": { "name": "systemd-sysctl.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-sysext.service": { "name": "systemd-sysext.service", "source": "systemd", "state": "stopped", "status": "enabled" }, "systemd-sysext@.service": { "name": "systemd-sysext@.service", "source": "systemd", "state": "unknown", "status": "static" }, "systemd-sysupdate-reboot.service": { "name": "systemd-sysupdate-reboot.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysupdate.service": { "name": "systemd-sysupdate.service", "source": "systemd", "state": "inactive", "status": "indirect" }, "systemd-sysusers.service": { "name": "systemd-sysusers.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-timedated.service": { "name": "systemd-timedated.service", "source": "systemd", "state": "inactive", "status": "static" }, "systemd-timesyncd.service": { "name": "systemd-timesyncd.service", "source": "systemd", "state": "stopped", "status": "not-found" }, "systemd-tmpfiles-clean.service": { "name": "systemd-tmpfiles-clean.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev-early.service": { "name": "systemd-tmpfiles-setup-dev-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup-dev.service": { "name": "systemd-tmpfiles-setup-dev.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tmpfiles-setup.service": { "name": "systemd-tmpfiles-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup-early.service": { "name": "systemd-tpm2-setup-early.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-tpm2-setup.service": { "name": "systemd-tpm2-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-load-credentials.service": { "name": "systemd-udev-load-credentials.service", "source": "systemd", "state": "stopped", "status": "disabled" }, "systemd-udev-settle.service": { "name": "systemd-udev-settle.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udev-trigger.service": { "name": "systemd-udev-trigger.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-udevd.service": { "name": "systemd-udevd.service", "source": "systemd", "state": "running", "status": "static" }, "systemd-update-done.service": { "name": "systemd-update-done.service", "source": "systemd", "state": "stopped", "status": "static" }, 
"systemd-update-utmp-runlevel.service": { "name": "systemd-update-utmp-runlevel.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-update-utmp.service": { "name": "systemd-update-utmp.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-user-sessions.service": { "name": "systemd-user-sessions.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-vconsole-setup.service": { "name": "systemd-vconsole-setup.service", "source": "systemd", "state": "stopped", "status": "static" }, "systemd-volatile-root.service": { "name": "systemd-volatile-root.service", "source": "systemd", "state": "inactive", "status": "static" }, "user-runtime-dir@.service": { "name": "user-runtime-dir@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user-runtime-dir@0.service": { "name": "user-runtime-dir@0.service", "source": "systemd", "state": "stopped", "status": "active" }, "user-runtime-dir@3001.service": { "name": "user-runtime-dir@3001.service", "source": "systemd", "state": "stopped", "status": "active" }, "user@.service": { "name": "user@.service", "source": "systemd", "state": "unknown", "status": "static" }, "user@0.service": { "name": "user@0.service", "source": "systemd", "state": "running", "status": "active" }, "user@3001.service": { "name": "user@3001.service", "source": "systemd", "state": "running", "status": "active" }, "ypbind.service": { "name": "ypbind.service", "source": "systemd", "state": "stopped", "status": "not-found" } } }, "changed": false } TASK [fedora.linux_system_roles.podman : Create and update quadlets] *********** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/handle_quadlet_spec.yml:116 Saturday 11 January 2025 11:34:29 -0500 (0:00:01.933) 0:03:16.059 ****** skipping: [managed-node2] => { "changed": false, "false_condition": "__podman_state != \"absent\"", "skip_reason": "Conditional result was False" } TASK [fedora.linux_system_roles.podman : Cancel linger] ************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:196 Saturday 11 January 2025 11:34:29 -0500 (0:00:00.032) 0:03:16.092 ****** skipping: [managed-node2] => { "changed": false, "skipped_reason": "No items in the list" } TASK [fedora.linux_system_roles.podman : Handle credential files - absent] ***** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:202 Saturday 11 January 2025 11:34:29 -0500 (0:00:00.030) 0:03:16.123 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [fedora.linux_system_roles.podman : Handle certs.d files - absent] ******** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:211 Saturday 11 January 2025 11:34:29 -0500 (0:00:00.029) 0:03:16.152 ****** skipping: [managed-node2] => { "censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false } TASK [Ensure no resources] ***************************************************** task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:188 Saturday 11 January 2025 11:34:29 -0500 (0:00:00.047) 0:03:16.200 ****** fatal: [managed-node2]: FAILED! 
=> { "assertion": "ansible_facts[\"services\"] | dict2items | rejectattr(\"value.status\", \"match\", \"not-found\") | selectattr(\"key\", \"match\", \"quadlet-demo\") | list | length == 0", "changed": false, "evaluated_to": false } MSG: Assertion failed TASK [Debug] ******************************************************************* task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:199 Saturday 11 January 2025 11:34:29 -0500 (0:00:00.057) 0:03:16.258 ****** ok: [managed-node2] => { "changed": false, "cmd": "exec 1>&2\nset -x\nset -o pipefail\nsystemctl list-units --plain -l --all | grep quadlet || :\nsystemctl list-unit-files --all | grep quadlet || :\nsystemctl list-units --plain --failed -l --all | grep quadlet || :\n", "delta": "0:00:00.369406", "end": "2025-01-11 11:34:30.176154", "rc": 0, "start": "2025-01-11 11:34:29.806748" } STDERR: + set -o pipefail + systemctl list-units --plain -l --all + grep quadlet quadlet-demo-mysql.service not-found failed failed quadlet-demo-mysql.service quadlet-demo.service not-found failed failed quadlet-demo.service + systemctl list-unit-files --all + grep quadlet + : + systemctl list-units --plain --failed -l --all + grep quadlet quadlet-demo-mysql.service not-found failed failed quadlet-demo-mysql.service quadlet-demo.service not-found failed failed quadlet-demo.service TASK [Get journald] ************************************************************ task path: /tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:209 Saturday 11 January 2025 11:34:30 -0500 (0:00:00.738) 0:03:16.997 ****** fatal: [managed-node2]: FAILED! => { "changed": false, "cmd": [ "journalctl", "-ex" ], "delta": "0:00:00.029908", "end": "2025-01-11 11:34:30.576470", "failed_when_result": true, "rc": 0, "start": "2025-01-11 11:34:30.546562" } STDOUT: Jan 11 11:28:12 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 11 11:28:32 managed-node2 systemd[1]: systemd-hostnamed.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit systemd-hostnamed.service has successfully entered the 'dead' state. Jan 11 11:28:35 managed-node2 sshd-session[6514]: Accepted publickey for root from 10.31.14.128 port 52846 ssh2: RSA SHA256:9j1blwt3wcrRiGYZQ7ZGu9axm3cDklH6/z4c+Ee8CzE Jan 11 11:28:35 managed-node2 systemd-logind[659]: New session 5 of user root. ░░ Subject: A new session 5 has been created for user root ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 5 has been created for the user root. ░░ ░░ The leading process of the session is 6514. Jan 11 11:28:35 managed-node2 systemd[1]: Started session-5.scope - Session 5 of User root. ░░ Subject: A start job for unit session-5.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit session-5.scope has finished successfully. ░░ ░░ The job identifier is 1136. 
Jan 11 11:28:35 managed-node2 sshd-session[6514]: pam_unix(sshd:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:28:36 managed-node2 python3.12[6663]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 11 11:28:37 managed-node2 python3.12[6817]: ansible-tempfile Invoked with state=directory prefix=lsr_ suffix=_podman path=None Jan 11 11:28:38 managed-node2 python3.12[6942]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False state=None _original_basename=None _diff_peek=None src=None modification_time=None access_time=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:39 managed-node2 python3.12[7067]: ansible-user Invoked with name=podman_basic_user uid=3001 state=present non_unique=False force=False remove=False create_home=True system=False move_home=False append=False ssh_key_bits=0 ssh_key_type=rsa ssh_key_comment=ansible-generated on managed-node2 update_password=always group=None groups=None comment=None home=None shell=None password=NOT_LOGGING_PARAMETER login_class=None password_expire_max=None password_expire_min=None password_expire_warn=None hidden=None seuser=None skeleton=None generate_ssh_key=None ssh_key_file=None ssh_key_passphrase=NOT_LOGGING_PARAMETER expires=None password_lock=None local=None profile=None authorization=None role=None umask=None Jan 11 11:28:39 managed-node2 useradd[7069]: new group: name=podman_basic_user, GID=3001 Jan 11 11:28:39 managed-node2 useradd[7069]: new user: name=podman_basic_user, UID=3001, GID=3001, home=/home/podman_basic_user, shell=/bin/bash, from=/dev/pts/0 Jan 11 11:28:40 managed-node2 python3.12[7194]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd1 state=directory mode=0755 owner=podman_basic_user recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:40 managed-node2 python3.12[7319]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd2 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:41 managed-node2 python3.12[7444]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd3 state=directory mode=0755 owner=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:41 managed-node2 python3.12[7569]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd1/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:28:41 managed-node2 python3.12[7669]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_adhv0_3l_podman/httpd1/index.txt 
mode=0644 owner=podman_basic_user src=/root/.ansible/tmp/ansible-tmp-1736612921.1510544-7094-193496919768865/.source.txt _original_basename=.060zt5t3 follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:42 managed-node2 python3.12[7794]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd2/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:28:42 managed-node2 python3.12[7894]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_adhv0_3l_podman/httpd2/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1736612921.821283-7094-184250787704089/.source.txt _original_basename=.wa9mfvw9 follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:42 managed-node2 python3.12[8019]: ansible-ansible.legacy.stat Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd3/index.txt follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:28:43 managed-node2 python3.12[8119]: ansible-ansible.legacy.copy Invoked with dest=/tmp/lsr_adhv0_3l_podman/httpd3/index.txt mode=0644 owner=root src=/root/.ansible/tmp/ansible-tmp-1736612922.4798434-7094-30966835373969/.source.txt _original_basename=.8cy2dulw follow=False checksum=40bd001563085fc35165329ea1ff5c5ecbdbbeef backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:28:43 managed-node2 python3.12[8244]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:28:43 managed-node2 python3.12[8369]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:28:45 managed-node2 sudo[8619]: root : TTY=pts/0 ; PWD=/root ; USER=root ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-anvwlwokizgjnjnjtuosodawsgjcavfv ; /usr/bin/python3.12 /root/.ansible/tmp/ansible-tmp-1736612925.1132412-7210-124838245355389/AnsiballZ_dnf.py' Jan 11 11:28:45 managed-node2 sudo[8619]: pam_unix(sudo:session): session opened for user root(uid=0) by root(uid=0) Jan 11 11:28:45 managed-node2 python3.12[8622]: ansible-ansible.legacy.dnf Invoked with name=['iptables-nft', 'podman', 'shadow-utils-subid'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:28:54 managed-node2 kernel: SELinux: Converting 385 SID table entries... 
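The journal entries above record the test fixture setup: a rootless user, three httpd content directories each seeded with an index.txt, and a dnf transaction for iptables-nft, podman and shadow-utils-subid. A simplified sketch of those module invocations as ordinary tasks (layout, names and file content are illustrative; the real test also sets per-directory ownership, which is omitted here):

    - hosts: managed-node2
      become: true
      tasks:
        - name: Create the rootless test user
          ansible.builtin.user:
            name: podman_basic_user
            uid: 3001

        - name: Create the httpd content directories
          ansible.builtin.file:
            path: "/tmp/lsr_adhv0_3l_podman/{{ item }}"
            state: directory
            mode: "0755"
          loop: [httpd1, httpd2, httpd3]

        - name: Seed each directory with an index.txt
          ansible.builtin.copy:
            content: "placeholder"   # the real content is not shown in the log (no_log)
            dest: "/tmp/lsr_adhv0_3l_podman/{{ item }}/index.txt"
            mode: "0644"
          loop: [httpd1, httpd2, httpd3]

        - name: Install podman and its prerequisites
          ansible.builtin.dnf:
            name:
              - iptables-nft
              - podman
              - shadow-utils-subid
            state: present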
Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 11 11:28:54 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 11 11:28:57 managed-node2 kernel: SELinux: Converting 386 SID table entries... Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 11 11:28:57 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 11 11:28:57 managed-node2 setsebool[8707]: The virt_use_nfs policy boolean was changed to 1 by root Jan 11 11:28:57 managed-node2 setsebool[8707]: The virt_sandbox_use_all_caps policy boolean was changed to 1 by root Jan 11 11:29:00 managed-node2 kernel: SELinux: Converting 393 SID table entries... Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 11 11:29:00 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 11 11:29:01 managed-node2 groupadd[8727]: group added to /etc/group: name=polkitd, GID=114 Jan 11 11:29:01 managed-node2 groupadd[8727]: group added to /etc/gshadow: name=polkitd Jan 11 11:29:01 managed-node2 groupadd[8727]: new group: name=polkitd, GID=114 Jan 11 11:29:01 managed-node2 useradd[8730]: new user: name=polkitd, UID=114, GID=114, home=/, shell=/sbin/nologin, from=none Jan 11 11:29:01 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. 
░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:29:01 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:29:01 managed-node2 systemd[1]: Listening on pcscd.socket - PC/SC Smart Card Daemon Activation Socket. ░░ Subject: A start job for unit pcscd.socket has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit pcscd.socket has finished successfully. ░░ ░░ The job identifier is 1220. Jan 11 11:29:02 managed-node2 systemd[1]: Started run-p9129-i9429.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p9129-i9429.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p9129-i9429.service has finished successfully. ░░ ░░ The job identifier is 1298. Jan 11 11:29:02 managed-node2 systemctl[9130]: Warning: The unit file, source configuration file or drop-ins of man-db-cache-update.service changed on disk. Run 'systemctl daemon-reload' to reload units. Jan 11 11:29:02 managed-node2 systemd[1]: Reload requested from client PID 9133 ('systemctl') (unit session-5.scope)... Jan 11 11:29:02 managed-node2 systemd[1]: Reloading... Jan 11 11:29:02 managed-node2 systemd-ssh-generator[9182]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:29:02 managed-node2 systemd-rc-local-generator[9179]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:29:02 managed-node2 (sd-exec-[9152]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:29:02 managed-node2 systemd[1]: Reloading finished in 195 ms. Jan 11 11:29:02 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1376. Jan 11 11:29:02 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 11 11:29:02 managed-node2 systemd[4339]: Received SIGRTMIN+25 from PID 1 (systemd). 
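The two setsebool entries above show the role enabling virt-related SELinux booleans before containers are run. Outside the role, the same effect is commonly achieved with ansible.posix.seboolean; the module choice and the persistence flag are assumptions here, since the log only shows the resulting setsebool calls:

    - hosts: managed-node2
      become: true
      tasks:
        - name: Enable the SELinux booleans toggled in the log
          ansible.posix.seboolean:
            name: "{{ item }}"
            state: true
            persistent: true   # assumption; the log does not show whether -P was used
          loop:
            - virt_use_nfs
            - virt_sandbox_use_all_caps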
Jan 11 11:29:02 managed-node2 systemd[1]: Reloading user@0.service - User Manager for UID 0... ░░ Subject: A reload job for unit user@0.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has begun execution. ░░ ░░ The job identifier is 1454. Jan 11 11:29:02 managed-node2 systemd[4339]: Reexecuting. Jan 11 11:29:02 managed-node2 systemd[1]: Reloaded user@0.service - User Manager for UID 0. ░░ Subject: A reload job for unit user@0.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A reload job for unit user@0.service has finished. ░░ ░░ The job identifier is 1454 and the job result is done. Jan 11 11:29:03 managed-node2 sudo[8619]: pam_unix(sudo:session): session closed for user root Jan 11 11:29:04 managed-node2 python3.12[9871]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:29:05 managed-node2 python3.12[10008]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 11 11:29:05 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 11 11:29:05 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1376. Jan 11 11:29:05 managed-node2 systemd[1]: run-p9129-i9429.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p9129-i9429.service has successfully entered the 'dead' state. 
Jan 11 11:29:05 managed-node2 python3.12[10144]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:06 managed-node2 python3.12[10277]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:07 managed-node2 python3.12[10408]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:07 managed-node2 python3.12[10539]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:09 managed-node2 python3.12[10671]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:29:09 managed-node2 python3.12[10804]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:29:09 managed-node2 systemd[1]: Reload requested from client PID 10807 ('systemctl') (unit session-5.scope)... Jan 11 11:29:09 managed-node2 systemd[1]: Reloading... Jan 11 11:29:10 managed-node2 systemd-rc-local-generator[10845]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:29:10 managed-node2 systemd-ssh-generator[10851]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:29:10 managed-node2 (sd-exec-[10825]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:29:10 managed-node2 systemd[1]: Reloading finished in 188 ms. Jan 11 11:29:10 managed-node2 systemd[1]: Starting firewalld.service - firewalld - dynamic firewall daemon... ░░ Subject: A start job for unit firewalld.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has begun execution. ░░ ░░ The job identifier is 1455. Jan 11 11:29:10 managed-node2 systemd[1]: Started firewalld.service - firewalld - dynamic firewall daemon. ░░ Subject: A start job for unit firewalld.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit firewalld.service has finished successfully. ░░ ░░ The job identifier is 1455. Jan 11 11:29:10 managed-node2 kernel: Warning: Unmaintained driver is detected: ip_set Jan 11 11:29:10 managed-node2 systemd[1]: Starting polkit.service - Authorization Manager... ░░ Subject: A start job for unit polkit.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has begun execution. ░░ ░░ The job identifier is 1538. Jan 11 11:29:10 managed-node2 polkitd[10993]: Started polkitd version 125 Jan 11 11:29:10 managed-node2 systemd[1]: Started polkit.service - Authorization Manager. 
░░ Subject: A start job for unit polkit.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit polkit.service has finished successfully. ░░ ░░ The job identifier is 1538. Jan 11 11:29:11 managed-node2 python3.12[11045]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:29:12 managed-node2 python3.12[11177]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:13 managed-node2 python3.12[11308]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:13 managed-node2 python3.12[11439]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:14 managed-node2 python3.12[11571]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:15 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:29:15 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:29:15 managed-node2 systemd[1]: Started run-p11577-i11877.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p11577-i11877.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p11577-i11877.service has finished successfully. ░░ ░░ The job identifier is 1619. Jan 11 11:29:15 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 1697. Jan 11 11:29:15 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 11 11:29:15 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 1697. Jan 11 11:29:15 managed-node2 systemd[1]: run-p11577-i11877.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p11577-i11877.service has successfully entered the 'dead' state. Jan 11 11:29:15 managed-node2 python3.12[11711]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 fact_path=/etc/ansible/facts.d Jan 11 11:29:17 managed-node2 python3.12[11875]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:29:18 managed-node2 kernel: SELinux: Converting 423 SID table entries... 
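The firewall_lib and local_seport invocations above open TCP 15001-15003 in firewalld and label the same range http_port_t for SELinux. The test drives this through the fedora.linux_system_roles firewall and selinux roles and their internal modules; an equivalent using the public ansible.posix and community.general modules (an assumption, not what the test actually runs) would be:

    - hosts: managed-node2
      become: true
      tasks:
        - name: Open the quadlet demo test ports in firewalld
          ansible.posix.firewalld:
            port: 15001-15003/tcp
            permanent: true
            immediate: true
            state: enabled

        - name: Allow the same ports under the http_port_t SELinux type
          community.general.seport:
            ports: 15001-15003
            proto: tcp
            setype: http_port_t
            state: present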
Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability network_peer_controls=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability open_perms=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability extended_socket_class=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability always_check_network=0 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability cgroup_seclabel=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability nnp_nosuid_transition=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability genfs_seclabel_symlinks=1 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability ioctl_skip_cloexec=0 Jan 11 11:29:18 managed-node2 kernel: SELinux: policy capability userspace_initial_context=0 Jan 11 11:29:18 managed-node2 python3.12[12010]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:29:23 managed-node2 python3.12[12141]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:24 managed-node2 python3.12[12274]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:25 managed-node2 python3.12[12405]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:29:25 managed-node2 python3.12[12536]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:29:25 managed-node2 python3.12[12641]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/nopull.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736612965.2957008-8350-171865569281770/.source.yml _original_basename=.uofn7p8f follow=False checksum=d5dc917e3cae36de03aa971a17ac473f86fdf934 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:29:26 managed-node2 python3.12[12772]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:29:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-compat2222882315-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-compat2222882315-merged.mount has successfully entered the 'dead' state. 
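The nopull.yml kube file is copied into /etc/containers/ansible-kubernetes.d and handed to containers.podman.podman_play with state=created, so the pod is defined but its containers are never started. A minimal task sketch reconstructed from the logged module parameters:

    - name: Create (but do not start) the nopull pod from its kube file (sketch)
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: created
        executable: podman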
Jan 11 11:29:26 managed-node2 rsyslogd[886]: imjournal: journal files changed, reloading... [v8.2412.0-1.el10 try https://www.rsyslog.com/e/0 ] Jan 11 11:29:26 managed-node2 kernel: evm: overlay not supported Jan 11 11:29:26 managed-node2 podman[12779]: 2025-01-11 11:29:26.527506046 -0500 EST m=+0.076751034 system refresh Jan 11 11:29:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay-metacopy\x2dcheck928390053-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-metacopy\x2dcheck928390053-merged.mount has successfully entered the 'dead' state. Jan 11 11:29:26 managed-node2 podman[12779]: 2025-01-11 11:29:26.802195054 -0500 EST m=+0.351440103 image build f41b26d639b72cab2f92dd19cf0a7b368c0addd834cde4dfbe6ba9cc0b5e9755 Jan 11 11:29:26 managed-node2 systemd[1]: Created slice machine.slice - Slice /machine. ░░ Subject: A start job for unit machine.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine.slice has finished successfully. ░░ ░░ The job identifier is 1776. Jan 11 11:29:26 managed-node2 systemd[1]: Created slice machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice - cgroup machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice. ░░ Subject: A start job for unit machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice has finished successfully. ░░ ░░ The job identifier is 1775. Jan 11 11:29:26 managed-node2 podman[12779]: 2025-01-11 11:29:26.84857707 -0500 EST m=+0.397821985 container create 8b291853c44d64b81d1ceed7ffd8c7c0a03d32d84fe9fcd377032ab3ba163ed6 (image=localhost/podman-pause:5.3.1-1733097600, name=8f21b98d6977-infra, pod_id=8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844, io.buildah.version=1.38.0) Jan 11 11:29:26 managed-node2 podman[12779]: 2025-01-11 11:29:26.854692046 -0500 EST m=+0.403936935 pod create 8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844 (image=, name=nopull) Jan 11 11:29:28 managed-node2 podman[12779]: 2025-01-11 11:29:28.907614035 -0500 EST m=+2.456859238 image pull 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 11 11:29:28 managed-node2 podman[12779]: 2025-01-11 11:29:28.930557469 -0500 EST m=+2.479802384 container create 010c205b58090946e480f7d230e44e257dd9db66fbcfe0636fb2c68d69f1bba4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844, created_by=test/system/build-testimage, io.buildah.version=1.21.0, app=test, io.containers.autoupdate=registry, created_at=2021-06-10T18:55:36Z) Jan 11 11:29:28 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
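Podman then builds the local pause image, creates the nopull pod with its infra container, and pulls quay.io/libpod/testimage:20210610 for the nopull-nopull container. A purely diagnostic task sketch for confirming the pod exists without starting it (this check is not part of the logged run, and the registered variable name is made up):

    - name: Inspect the created pod (sketch, diagnostic only)
      ansible.builtin.command: podman pod inspect nopull
      register: __nopull_pod_info   # hypothetical variable name
      changed_when: false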
Jan 11 11:29:31 managed-node2 python3.12[13112]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:29:32 managed-node2 python3.12[13249]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:35 managed-node2 python3.12[13382]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:36 managed-node2 python3.12[13514]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:29:36 managed-node2 python3.12[13647]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:29:37 managed-node2 python3.12[13780]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:29:39 managed-node2 python3.12[13911]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:40 managed-node2 python3.12[14043]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:29:41 managed-node2 python3.12[14175]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 11 11:29:42 managed-node2 python3.12[14335]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:29:43 managed-node2 python3.12[14466]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:29:47 managed-node2 python3.12[14597]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:29:49 managed-node2 podman[14738]: 2025-01-11 11:29:49.434170482 -0500 EST m=+0.252936003 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 11 11:29:49 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:29:49 managed-node2 python3.12[14876]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:50 managed-node2 python3.12[15007]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:29:50 managed-node2 python3.12[15138]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:29:50 managed-node2 python3.12[15243]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/ansible-kubernetes.d/bogus.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736612990.3622415-9267-128608785517959/.source.yml _original_basename=.lo9fbot2 follow=False checksum=f8266a972ed3be7e204d2a67883fe3a22b8dbf18 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:29:51 managed-node2 python3.12[15374]: ansible-containers.podman.podman_play Invoked with state=created kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None 
quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:29:51 managed-node2 systemd[1]: Created slice machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice - cgroup machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice. ░░ Subject: A start job for unit machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice has finished successfully. ░░ ░░ The job identifier is 1781. Jan 11 11:29:51 managed-node2 podman[15381]: 2025-01-11 11:29:51.566032778 -0500 EST m=+0.059424724 container create 5a92effc85487f4e0d67a204339e8c66d39fd2acb0e397ba7a06494670bdc5fa (image=localhost/podman-pause:5.3.1-1733097600, name=6939fbfdbe78-infra, pod_id=6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7, io.buildah.version=1.38.0) Jan 11 11:29:51 managed-node2 podman[15381]: 2025-01-11 11:29:51.57210719 -0500 EST m=+0.065499244 pod create 6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7 (image=, name=bogus) Jan 11 11:29:52 managed-node2 podman[15381]: 2025-01-11 11:29:52.780406701 -0500 EST m=+1.273798703 image pull-error quay.io/linux-system-roles/this_is_a_bogus_image:latest initializing source docker://quay.io/linux-system-roles/this_is_a_bogus_image:latest: reading manifest latest in quay.io/linux-system-roles/this_is_a_bogus_image: unauthorized: access to the requested resource is not authorized Jan 11 11:29:52 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
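Both pull attempts for quay.io/linux-system-roles/this_is_a_bogus_image fail with "unauthorized", yet podman_play still creates the bogus pod and its infra container; only the application container is missing. A sketch of how a test task could provoke and tolerate that same failure explicitly; this task does not appear in the log and the registered name is made up:

    - name: Try to pull the deliberately bogus image (sketch; failure is expected)
      containers.podman.podman_image:
        name: quay.io/linux-system-roles/this_is_a_bogus_image
        state: present
      register: __bogus_pull        # hypothetical variable name
      ignore_errors: true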
Jan 11 11:29:55 managed-node2 python3.12[15650]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:29:56 managed-node2 python3.12[15787]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:29:58 managed-node2 python3.12[15920]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:00 managed-node2 python3.12[16052]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:30:00 managed-node2 python3.12[16185]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:30:01 managed-node2 python3.12[16318]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:30:03 managed-node2 python3.12[16449]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:03 managed-node2 python3.12[16581]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:04 managed-node2 python3.12[16713]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 11 11:30:06 managed-node2 python3.12[16873]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:30:06 managed-node2 python3.12[17004]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:30:11 managed-node2 python3.12[17135]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:12 managed-node2 python3.12[17268]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/nopull.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:13 managed-node2 python3.12[17400]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-nopull.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 11 11:30:13 managed-node2 python3.12[17533]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:14 managed-node2 python3.12[17666]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/nopull.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:30:14 managed-node2 python3.12[17666]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/nopull.yml Jan 11 11:30:14 managed-node2 podman[17673]: 2025-01-11 11:30:14.120968075 -0500 EST m=+0.024014018 pod stop 8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844 (image=, name=nopull) Jan 11 11:30:14 managed-node2 systemd[1]: Removed slice machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice - cgroup machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice. ░░ Subject: A stop job for unit machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844.slice has finished. ░░ ░░ The job identifier is 1787 and the job result is done. 
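Tearing the nopull play down is symmetric to creating it: systemd-escape derives the per-file podman-kube@ unit name, that unit is stopped and disabled, and podman_play runs again with state=absent, which stops and removes the pod and its containers. A condensed sketch of that sequence, reconstructed from the logged commands and module parameters (the registered variable name is made up):

    - name: Derive the podman-kube@ unit name for the kube file (sketch)
      ansible.builtin.command: >-
        systemd-escape --template podman-kube@.service
        /etc/containers/ansible-kubernetes.d/nopull.yml
      register: __kube_unit         # hypothetical variable name
      changed_when: false

    - name: Stop and disable the per-file unit (sketch)
      ansible.builtin.systemd:
        name: "{{ __kube_unit.stdout }}"
        state: stopped
        enabled: false

    - name: Remove the play itself (sketch)
      containers.podman.podman_play:
        kube_file: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: absent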
Jan 11 11:30:14 managed-node2 podman[17673]: 2025-01-11 11:30:14.1635176 -0500 EST m=+0.066563524 container remove 010c205b58090946e480f7d230e44e257dd9db66fbcfe0636fb2c68d69f1bba4 (image=quay.io/libpod/testimage:20210610, name=nopull-nopull, pod_id=8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844, created_at=2021-06-10T18:55:36Z, created_by=test/system/build-testimage, io.buildah.version=1.21.0, io.containers.autoupdate=registry, app=test) Jan 11 11:30:14 managed-node2 podman[17673]: 2025-01-11 11:30:14.18894132 -0500 EST m=+0.091987362 container remove 8b291853c44d64b81d1ceed7ffd8c7c0a03d32d84fe9fcd377032ab3ba163ed6 (image=localhost/podman-pause:5.3.1-1733097600, name=8f21b98d6977-infra, pod_id=8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844, io.buildah.version=1.38.0) Jan 11 11:30:14 managed-node2 podman[17673]: 2025-01-11 11:30:14.198654504 -0500 EST m=+0.101700390 pod remove 8f21b98d69777a1218bb5004f4328b2c021f4c67e942421bf447537e489b9844 (image=, name=nopull) Jan 11 11:30:14 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:30:14 managed-node2 python3.12[17813]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/nopull.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:15 managed-node2 python3.12[17944]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
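After the pod is gone, the kube file is deleted and dangling images are pruned. A minimal sketch of that cleanup, matching the logged file and command invocations:

    - name: Remove the kube file (sketch)
      ansible.builtin.file:
        path: /etc/containers/ansible-kubernetes.d/nopull.yml
        state: absent

    - name: Prune images left behind by the removed play (sketch)
      ansible.builtin.command: podman image prune -f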
Jan 11 11:30:18 managed-node2 python3.12[18214]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:19 managed-node2 python3.12[18351]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:22 managed-node2 python3.12[18484]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:23 managed-node2 python3.12[18616]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:30:24 managed-node2 python3.12[18749]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:30:24 managed-node2 python3.12[18882]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:30:26 managed-node2 python3.12[19013]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:27 managed-node2 python3.12[19145]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:28 managed-node2 python3.12[19277]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 11 11:30:30 managed-node2 python3.12[19437]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:30:30 managed-node2 python3.12[19568]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:30:35 managed-node2 python3.12[19699]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:36 managed-node2 python3.12[19832]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /etc/containers/ansible-kubernetes.d/bogus.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:37 managed-node2 python3.12[19964]: ansible-systemd Invoked with name=podman-kube@-etc-containers-ansible\x2dkubernetes.d-bogus.yml.service scope=system state=stopped enabled=False daemon_reload=False daemon_reexec=False no_block=False force=None masked=None Jan 11 11:30:38 managed-node2 python3.12[20097]: ansible-stat Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:38 managed-node2 python3.12[20230]: ansible-containers.podman.podman_play Invoked with state=absent kube_file=/etc/containers/ansible-kubernetes.d/bogus.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None debug=None quiet=None recreate=None userns=None log_level=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:30:38 managed-node2 python3.12[20230]: ansible-containers.podman.podman_play version: 5.3.1, kube file /etc/containers/ansible-kubernetes.d/bogus.yml Jan 11 11:30:38 managed-node2 podman[20237]: 2025-01-11 11:30:38.640117526 -0500 EST m=+0.024351398 pod stop 6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7 (image=, name=bogus) Jan 11 11:30:38 managed-node2 systemd[1]: Removed slice machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice - cgroup machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice. ░░ Subject: A stop job for unit machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7.slice has finished. ░░ ░░ The job identifier is 1789 and the job result is done. 
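The same firewalld, SELinux, and package preflight repeats before each cleanup pass (it is logged once more just above). A condensed sketch of the firewalld portion, using the standard modules the journal shows being invoked:

    - name: Ensure firewalld is installed (sketch)
      ansible.builtin.dnf:
        name: firewalld
        state: present

    - name: Ensure firewalld is not masked (sketch)
      ansible.builtin.systemd:
        name: firewalld
        masked: false

    - name: Start and enable firewalld (sketch)
      ansible.builtin.systemd:
        name: firewalld
        state: started
        enabled: true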
Jan 11 11:30:38 managed-node2 podman[20237]: 2025-01-11 11:30:38.680455077 -0500 EST m=+0.064688863 container remove 5a92effc85487f4e0d67a204339e8c66d39fd2acb0e397ba7a06494670bdc5fa (image=localhost/podman-pause:5.3.1-1733097600, name=6939fbfdbe78-infra, pod_id=6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7, io.buildah.version=1.38.0) Jan 11 11:30:38 managed-node2 podman[20237]: 2025-01-11 11:30:38.690033298 -0500 EST m=+0.074267051 pod remove 6939fbfdbe7806915cedaa414a8bc98c9b016a05e789d967c53a3b35b3cd92a7 (image=, name=bogus) Jan 11 11:30:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:30:39 managed-node2 python3.12[20377]: ansible-file Invoked with path=/etc/containers/ansible-kubernetes.d/bogus.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:30:39 managed-node2 python3.12[20508]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 11 11:30:42 managed-node2 python3.12[20778]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:30:43 managed-node2 python3.12[20916]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:30:47 managed-node2 python3.12[21049]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:48 managed-node2 python3.12[21181]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:30:48 managed-node2 python3.12[21314]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:30:49 managed-node2 python3.12[21447]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['15001-15003/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:30:51 managed-node2 python3.12[21578]: ansible-ansible.legacy.dnf Invoked with name=['python3-libselinux', 'python3-policycoreutils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:52 managed-node2 python3.12[21710]: ansible-ansible.legacy.dnf Invoked with name=['policycoreutils-python-utils'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:30:53 managed-node2 python3.12[21842]: ansible-setup Invoked with filter=['ansible_selinux'] gather_subset=['all'] gather_timeout=10 
fact_path=/etc/ansible/facts.d Jan 11 11:30:54 managed-node2 python3.12[22002]: ansible-fedora.linux_system_roles.local_seport Invoked with ports=['15001-15003'] proto=tcp setype=http_port_t state=present local=False ignore_selinux_state=False reload=True Jan 11 11:30:55 managed-node2 python3.12[22133]: ansible-fedora.linux_system_roles.selinux_modules_facts Invoked Jan 11 11:30:59 managed-node2 python3.12[22264]: ansible-getent Invoked with database=passwd key=podman_basic_user fail_key=False service=None split=None Jan 11 11:31:00 managed-node2 python3.12[22396]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:00 managed-node2 python3.12[22529]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:01 managed-node2 python3.12[22661]: ansible-ansible.legacy.command Invoked with _raw_params=getsubids -g podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:02 managed-node2 python3.12[22793]: ansible-ansible.legacy.command Invoked with _raw_params=systemd-escape --template podman-kube@.service /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:03 managed-node2 python3.12[22925]: ansible-ansible.legacy.command Invoked with creates=/var/lib/systemd/linger/podman_basic_user _raw_params=loginctl enable-linger podman_basic_user _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None removes=None stdin=None Jan 11 11:31:03 managed-node2 systemd[1]: Created slice user-3001.slice - User Slice of UID 3001. ░░ Subject: A start job for unit user-3001.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-3001.slice has finished successfully. ░░ ░░ The job identifier is 1869. Jan 11 11:31:03 managed-node2 systemd[1]: Starting user-runtime-dir@3001.service - User Runtime Directory /run/user/3001... ░░ Subject: A start job for unit user-runtime-dir@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has begun execution. ░░ ░░ The job identifier is 1791. Jan 11 11:31:03 managed-node2 systemd[1]: Finished user-runtime-dir@3001.service - User Runtime Directory /run/user/3001. ░░ Subject: A start job for unit user-runtime-dir@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user-runtime-dir@3001.service has finished successfully. ░░ ░░ The job identifier is 1791. Jan 11 11:31:03 managed-node2 systemd[1]: Starting user@3001.service - User Manager for UID 3001... ░░ Subject: A start job for unit user@3001.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has begun execution. ░░ ░░ The job identifier is 1871. 
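Before anything rootless runs, the test resolves podman_basic_user, verifies its subordinate UID/GID ranges with getsubids, and enables lingering so the per-user systemd instance (user@3001.service, started just above) stays up without an interactive session. A sketch of those checks as tasks; the log shows them as raw commands:

    - name: Verify subordinate UID ranges for the rootless user (sketch)
      ansible.builtin.command: getsubids podman_basic_user
      changed_when: false

    - name: Verify subordinate GID ranges as well (sketch)
      ansible.builtin.command: getsubids -g podman_basic_user
      changed_when: false

    - name: Enable lingering for the rootless user (sketch)
      ansible.builtin.command:
        cmd: loginctl enable-linger podman_basic_user
        creates: /var/lib/systemd/linger/podman_basic_user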
Jan 11 11:31:03 managed-node2 systemd-logind[659]: New session 6 of user podman_basic_user. ░░ Subject: A new session 6 has been created for user podman_basic_user ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ Documentation: sd-login(3) ░░ ░░ A new session with the ID 6 has been created for the user podman_basic_user. ░░ ░░ The leading process of the session is 22929. Jan 11 11:31:03 managed-node2 (systemd)[22929]: pam_unix(systemd-user:session): session opened for user podman_basic_user(uid=3001) by podman_basic_user(uid=0) Jan 11 11:31:03 managed-node2 systemd[22929]: Queued start job for default target default.target. Jan 11 11:31:03 managed-node2 systemd[22929]: Created slice app.slice - User Application Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 5. Jan 11 11:31:03 managed-node2 systemd[22929]: Started grub-boot-success.timer - Mark boot as successful after the user session has run 2 minutes. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 11. Jan 11 11:31:03 managed-node2 systemd[22929]: Started systemd-tmpfiles-clean.timer - Daily Cleanup of User's Temporary Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 12. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target paths.target - Paths. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 8. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target timers.target - Timers. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 10. Jan 11 11:31:03 managed-node2 systemd[22929]: Starting dbus.socket - D-Bus User Message Bus Socket... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 4. Jan 11 11:31:03 managed-node2 systemd[22929]: Starting systemd-tmpfiles-setup.service - Create User Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 9. Jan 11 11:31:03 managed-node2 systemd[22929]: Finished systemd-tmpfiles-setup.service - Create User Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 9. Jan 11 11:31:03 managed-node2 systemd[22929]: Listening on dbus.socket - D-Bus User Message Bus Socket. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 4. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target sockets.target - Sockets. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 3. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target basic.target - Basic System. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 2. Jan 11 11:31:03 managed-node2 systemd[22929]: Reached target default.target - Main User Target. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 1. Jan 11 11:31:03 managed-node2 systemd[1]: Started user@3001.service - User Manager for UID 3001. ░░ Subject: A start job for unit user@3001.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit user@3001.service has finished successfully. ░░ ░░ The job identifier is 1871. Jan 11 11:31:03 managed-node2 systemd[22929]: Startup finished in 69ms. ░░ Subject: User manager start-up is now complete ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The user manager instance for user 3001 has been started. All services queued ░░ for starting have been started. Note that other services might still be starting ░░ up or be started at any later time. ░░ ░░ Startup of the manager took 69160 microseconds. Jan 11 11:31:04 managed-node2 python3.12[23075]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd1 state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:04 managed-node2 python3.12[23206]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman/httpd1-create state=directory owner=podman_basic_user group=3001 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:05 managed-node2 sudo[23379]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-ebjivediwrynnjycvbbcvkpnrvrgvyty ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613064.7957578-12329-92755468246573/AnsiballZ_podman_image.py' Jan 11 11:31:05 managed-node2 sudo[23379]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 11 11:31:05 managed-node2 systemd[22929]: Created slice session.slice - User Core Session Slice. 
░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 11 11:31:05 managed-node2 systemd[22929]: Starting dbus-broker.service - D-Bus User Message Bus... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 11 11:31:05 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 11 11:31:05 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 11 11:31:05 managed-node2 systemd[22929]: Started dbus-broker.service - D-Bus User Message Bus. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. Jan 11 11:31:05 managed-node2 dbus-broker-launch[23403]: Ready Jan 11 11:31:05 managed-node2 systemd[22929]: Created slice user.slice - Slice /user. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 20. Jan 11 11:31:05 managed-node2 systemd[22929]: Started podman-23389.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 19. Jan 11 11:31:05 managed-node2 systemd[22929]: Started podman-pause-de278718.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 23. Jan 11 11:31:05 managed-node2 systemd[22929]: Started podman-23405.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 27. Jan 11 11:31:06 managed-node2 systemd[22929]: Started podman-23430.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 31. 
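The sudo entries above show the pattern used for every rootless step: become podman_basic_user and export XDG_RUNTIME_DIR=/run/user/3001 so podman talks to the user's own runtime directory and systemd instance. A sketch of that pattern around the podman_image call; the image name is a placeholder, since the log does not show which image this particular invocation pulled:

    - name: Pull an image as the rootless user (sketch of the become/XDG_RUNTIME_DIR pattern)
      containers.podman.podman_image:
        name: quay.io/libpod/testimage:20210610   # hypothetical; the actual image is not visible in this excerpt
        state: present
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001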
Jan 11 11:31:06 managed-node2 sudo[23379]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 11 11:31:07 managed-node2 python3.12[23567]: ansible-stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:07 managed-node2 python3.12[23698]: ansible-file Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d state=directory owner=podman_basic_user group=3001 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:08 managed-node2 python3.12[23829]: ansible-ansible.legacy.stat Invoked with path=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:08 managed-node2 python3.12[23934]: ansible-ansible.legacy.copy Invoked with dest=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml owner=podman_basic_user group=3001 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736613067.8332987-12432-130581053166951/.source.yml _original_basename=.vntwb0uw follow=False checksum=bd406dc7744755fdf41f83e27a5ef8497bec46ba backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:08 managed-node2 sudo[24107]: root : TTY=pts/0 ; PWD=/root ; USER=podman_basic_user ; COMMAND=/bin/sh -c 'echo BECOME-SUCCESS-hdtiduuiveohnpfnsmzuxmnranaqsupd ; XDG_RUNTIME_DIR=/run/user/3001 /usr/bin/python3.12 /var/tmp/ansible-tmp-1736613068.54518-12462-228328251565918/AnsiballZ_podman_play.py' Jan 11 11:31:08 managed-node2 sudo[24107]: pam_unix(sudo:session): session opened for user podman_basic_user(uid=3001) by root(uid=0) Jan 11 11:31:08 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play Invoked with state=started debug=True log_level=debug kube_file=/home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml executable=podman annotation=None kube_file_content=None authfile=None build=None cert_dir=None configmap=None context_dir=None seccomp_profile_root=None username=None password=NOT_LOGGING_PARAMETER log_driver=None log_opt=None network=None tls_verify=None quiet=None recreate=None userns=None quadlet_dir=None quadlet_filename=None quadlet_file_mode=None quadlet_options=None Jan 11 11:31:08 managed-node2 systemd[22929]: Started podman-24117.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 35. Jan 11 11:31:09 managed-node2 systemd[22929]: Created slice user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice - cgroup user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 39. 
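The httpd1.yml kube file is copied into the user's ~/.config/containers/ansible-kubernetes.d and started rootlessly with podman_play; debug=True and log_level=debug account for the verbose podman output that follows. A task sketch reconstructed from the logged parameters:

    - name: Start the httpd1 play as the rootless user (sketch)
      containers.podman.podman_play:
        kube_file: /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml
        state: started
        debug: true
        log_level: debug
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001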
Jan 11 11:31:09 managed-node2 kernel: tun: Universal TUN/TAP device driver, 1.6 Jan 11 11:31:09 managed-node2 systemd[22929]: Started rootless-netns-832a999c.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 43. Jan 11 11:31:09 managed-node2 kernel: bridge: filtering via arp/ip/ip6tables is no longer available by default. Update your scripts to load br_netfilter if you need this. Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:09 managed-node2 kernel: veth0: entered allmulticast mode Jan 11 11:31:09 managed-node2 kernel: veth0: entered promiscuous mode Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered blocking state Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered forwarding state Jan 11 11:31:09 managed-node2 systemd[22929]: Started run-p24198-i24498.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/user/3001/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 47. Jan 11 11:31:09 managed-node2 aardvark-dns[24198]: starting aardvark on a child with pid 24199 Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Successfully parsed config Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Listen v4 ip {"podman-default-kube-network": [10.89.0.1]} Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Listen v6 ip {} Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Using the following upstream servers: [169.254.1.1:53, 10.29.169.13:53, 10.29.170.12:53] Jan 11 11:31:09 managed-node2 conmon[24214]: conmon 6494e2ff6f9e6c9552ee : failed to write to /proc/self/oom_score_adj: Permission denied Jan 11 11:31:09 managed-node2 systemd[22929]: Started libpod-conmon-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 51. Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : addr{sun_family=AF_UNIX, sun_path=/proc/self/fd/14/attach} Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : terminal_ctrl_fd: 14 Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : winsz read side: 17, winsz write side: 18 Jan 11 11:31:09 managed-node2 systemd[22929]: Started libpod-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope - libcrun container. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 56. 
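Starting the rootless pod brings up the user-mode network stack: a rootless network namespace, the podman1 bridge with a veth pair, and aardvark-dns listening on 10.89.0.1 for podman-default-kube-network. A hedged diagnostic sketch for inspecting that network as the user (not part of the logged run):

    - name: Inspect the rootless kube network (sketch, diagnostic only)
      ansible.builtin.command: podman network inspect podman-default-kube-network
      become: true
      become_user: podman_basic_user
      environment:
        XDG_RUNTIME_DIR: /run/user/3001
      changed_when: false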
Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : container PID: 24217 Jan 11 11:31:09 managed-node2 conmon[24215]: conmon 6494e2ff6f9e6c9552ee : container 24217 exited with status 127 Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Called cleanup.PersistentPreRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e)" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Setting custom database backend: \"sqlite\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="Using sqlite as database backend" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="systemd-logind: Unknown object '/'." Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using graph driver overlay" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using run root /run/user/3001/containers" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using transient store: false" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cached value indicated that overlay is supported" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cached value indicated that metacopy is not being used" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cached value indicated that native-diff is usable" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" Jan 11 11:31:09 managed-node2 
/usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Initializing event backend file" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="Setting parallel job count to 7" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cleaning up container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Tearing down network namespace at /run/user/3001/netns/netns-1ba1aabb-9ce7-ed09-ced0-c13ed359f037 for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network fb42127ceae097e95afb4c22c535aaa88e07cb9147b8dd0d6c01dc2c233a4302 bridge podman1 2025-01-11 11:31:08.992158363 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully loaded 2 networks" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="The path of /etc/resolv.conf in 
the mount ns is \"/etc/resolv.conf\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Tearing down..\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [INFO netavark::firewall] Using nftables firewall driver\n" Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Received SIGHUP Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Successfully parsed config Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Listen v4 ip {} Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: Listen v6 ip {} Jan 11 11:31:09 managed-node2 aardvark-dns[24199]: No configuration found stopping the sever Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:09 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:31:09 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:31:09 managed-node2 kernel: podman1: port 1(veth0) entered disabled state Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [INFO netavark::network::bridge] removing bridge podman1\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"INPUT\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(Meta(Meta { key: L4proto })), right: Named(Set([Element(String(\"tcp\")), Element(String(\"udp\"))])), op: EQ }), Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"th\", field: \"dport\" }))), right: Number(53), op: EQ }), Accept(None)], handle: Some(23), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"daddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Match(Match { left: Named(CT(CT { key: \"state\", family: None, dir: None })), right: List([String(\"established\"), String(\"related\")]), op: IN }), Accept(None)], handle: Some(24), index: None, comment: None }\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"FORWARD\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Accept(None)], handle: Some(25), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"POSTROUTING\", expr: [Match(Match { left: Named(Payload(PayloadField(PayloadField { protocol: \"ip\", field: \"saddr\" }))), right: Named(Prefix(Prefix { addr: String(\"10.89.0.0\"), len: 24 })), op: EQ }), Jump(JumpTarget { target: \"nv_fb42127c_10_89_0_0_nm24\" })], handle: Some(26), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 4 rules\n[DEBUG netavark::firewall::nft] Found chain nv_fb42127c_10_89_0_0_nm24\n" Jan 11 11:31:09 managed-node2 
/usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Matched Rule { family: INet, table: \"netavark\", chain: \"NETAVARK-ISOLATION-3\", expr: [Match(Match { left: Named(Meta(Meta { key: Oifname })), right: String(\"podman1\"), op: EQ }), Drop(None)], handle: Some(17), index: None, comment: None }\n[DEBUG netavark::firewall::nft] Removing 1 isolation rules for network\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::firewall::nft] Found chain nv_fb42127c_10_89_0_0_nm24_dnat\n[DEBUG netavark::firewall::nft] Found chain nv_fb42127c_10_89_0_0_nm24_dnat\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="netavark: [DEBUG netavark::commands::teardown] Teardown complete\n" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Cleaning up rootless network namespace" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully cleaned up container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Unmounted container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\"" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Called cleanup.PersistentPostRunE(/usr/bin/podman --root /home/podman_basic_user/.local/share/containers/storage --runroot /run/user/3001/containers --log-level debug --cgroup-manager systemd --tmpdir /run/user/3001/libpod/tmp --network-config-dir --network-backend netavark --volumepath /home/podman_basic_user/.local/share/containers/storage/volumes --db-backend sqlite --transient-store=false --runtime crun --storage-driver overlay --events-backend file --syslog container cleanup --stopped-only 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e)" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=debug msg="Shutting down engines" Jan 11 11:31:09 managed-node2 /usr/bin/podman[24219]: time="2025-01-11T11:31:09-05:00" level=info msg="Received shutdown.Stop(), terminating!" PID=24219 Jan 11 11:31:09 managed-node2 systemd[22929]: Stopping libpod-conmon-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope... ░░ Subject: A stop job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has begun execution. ░░ ░░ The job identifier is 62. Jan 11 11:31:09 managed-node2 systemd[22929]: Stopped libpod-conmon-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 62 and the job result is done. Jan 11 11:31:09 managed-node2 systemd[22929]: Removed slice user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice - cgroup user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice. ░░ Subject: A stop job for unit UNIT has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit UNIT has finished. ░░ ░░ The job identifier is 61 and the job result is done. 
Jan 11 11:31:09 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE command: /bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml Jan 11 11:31:09 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stdout: [starting container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de: cannot get namespace path unless container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e is running: container is stopped] Pod: cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da Container: 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de Jan 11 11:31:09 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE stderr: time="2025-01-11T11:31:08-05:00" level=info msg="/bin/podman filtering at log level debug" time="2025-01-11T11:31:08-05:00" level=debug msg="Called kube.PersistentPreRunE(/bin/podman play kube --start=true --log-level=debug /home/podman_basic_user/.config/containers/ansible-kubernetes.d/httpd1.yml)" time="2025-01-11T11:31:08-05:00" level=debug msg="Using conmon: \"/usr/bin/conmon\"" time="2025-01-11T11:31:08-05:00" level=info msg="Using sqlite as database backend" time="2025-01-11T11:31:08-05:00" level=debug msg="systemd-logind: Unknown object '/'." time="2025-01-11T11:31:08-05:00" level=debug msg="Using graph driver overlay" time="2025-01-11T11:31:08-05:00" level=debug msg="Using graph root /home/podman_basic_user/.local/share/containers/storage" time="2025-01-11T11:31:08-05:00" level=debug msg="Using run root /run/user/3001/containers" time="2025-01-11T11:31:08-05:00" level=debug msg="Using static dir /home/podman_basic_user/.local/share/containers/storage/libpod" time="2025-01-11T11:31:08-05:00" level=debug msg="Using tmp dir /run/user/3001/libpod/tmp" time="2025-01-11T11:31:08-05:00" level=debug msg="Using volume path /home/podman_basic_user/.local/share/containers/storage/volumes" time="2025-01-11T11:31:08-05:00" level=debug msg="Using transient store: false" time="2025-01-11T11:31:08-05:00" level=debug msg="[graphdriver] trying provided driver \"overlay\"" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that overlay is supported" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that metacopy is not being used" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that native-diff is usable" time="2025-01-11T11:31:08-05:00" level=debug msg="backingFs=xfs, projectQuotaSupported=false, useNativeDiff=true, usingMetacopy=false" time="2025-01-11T11:31:08-05:00" level=debug msg="Initializing event backend file" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime ocijail initialization failed: no valid executable found for OCI runtime ocijail: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime crun-vm initialization failed: no valid executable found for OCI runtime crun-vm: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime youki initialization failed: no valid executable found for OCI runtime youki: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime kata initialization failed: no valid executable found for OCI runtime kata: invalid argument" 
time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime runsc initialization failed: no valid executable found for OCI runtime runsc: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime krun initialization failed: no valid executable found for OCI runtime krun: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime crun-wasm initialization failed: no valid executable found for OCI runtime crun-wasm: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime runc initialization failed: no valid executable found for OCI runtime runc: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Configured OCI runtime runj initialization failed: no valid executable found for OCI runtime runj: invalid argument" time="2025-01-11T11:31:08-05:00" level=debug msg="Using OCI runtime \"/usr/bin/crun\"" time="2025-01-11T11:31:08-05:00" level=info msg="Setting parallel job count to 7" time="2025-01-11T11:31:08-05:00" level=debug msg="Successfully loaded 1 networks" time="2025-01-11T11:31:08-05:00" level=debug msg="found free device name podman1" time="2025-01-11T11:31:08-05:00" level=debug msg="found free ipv4 network subnet 10.89.0.0/24" time="2025-01-11T11:31:08-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-11T11:31:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:08-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-11T11:31:08-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-11T11:31:08-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." time="2025-01-11T11:31:08-05:00" level=debug msg="reference \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" does not resolve to an image ID" time="2025-01-11T11:31:08-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-11T11:31:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:08-05:00" level=debug msg="FROM \"scratch\"" time="2025-01-11T11:31:08-05:00" level=debug msg="Cached value indicated that idmapped mounts for overlay are not supported" time="2025-01-11T11:31:08-05:00" level=debug msg="Check for idmapped mounts support " time="2025-01-11T11:31:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:08-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="overlay: test mount indicated that volatile is being used" time="2025-01-11T11:31:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0/empty,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0/work,userxattr,volatile,context=\"system_u:object_r:container_file_t:s0:c681,c972\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container ID: 108a4b73ea294c92e350e6914ba70ae43bc510d0757a34527239170a2fe2fa22" time="2025-01-11T11:31:09-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:copy Args:[/usr/libexec/podman/catatonit /catatonit] Flags:[] Attrs:map[] Message:COPY /usr/libexec/podman/catatonit /catatonit Heredocs:[] Original:COPY /usr/libexec/podman/catatonit /catatonit}" time="2025-01-11T11:31:09-05:00" level=debug msg="COPY []string(nil), imagebuilder.Copy{FromFS:false, From:\"\", Src:[]string{\"/usr/libexec/podman/catatonit\"}, Dest:\"/catatonit\", Download:false, Chown:\"\", Chmod:\"\", Checksum:\"\", Files:[]imagebuilder.File(nil), KeepGitDir:false, Link:false, Parents:false, Excludes:[]string(nil)}" time="2025-01-11T11:31:09-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"108a4b73ea294c92e350e6914ba70ae43bc510d0757a34527239170a2fe2fa22\"" time="2025-01-11T11:31:09-05:00" level=debug msg="added content file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67" time="2025-01-11T11:31:09-05:00" level=debug msg="Parsed Step: {Env:[PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin] Command:entrypoint Args:[/catatonit -P] Flags:[] Attrs:map[json:true] Message:ENTRYPOINT /catatonit -P Heredocs:[] Original:ENTRYPOINT [\"/catatonit\", \"-P\"]}" time="2025-01-11T11:31:09-05:00" level=debug msg="EnsureContainerPath \"/\" (owner \"\", mode 0) in \"108a4b73ea294c92e350e6914ba70ae43bc510d0757a34527239170a2fe2fa22\"" time="2025-01-11T11:31:09-05:00" level=debug msg="COMMIT localhost/podman-pause:5.3.1-1733097600" time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-11T11:31:09-05:00" level=debug msg="COMMIT \"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-11T11:31:09-05:00" level=debug msg="committing image with reference 
\"containers-storage:[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\" is allowed by policy" time="2025-01-11T11:31:09-05:00" level=debug msg="layer list: [\"ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0\"]" time="2025-01-11T11:31:09-05:00" level=debug msg="using \"/var/tmp/buildah4034309036\" to hold temporary data" time="2025-01-11T11:31:09-05:00" level=debug msg="Tar with options on /home/podman_basic_user/.local/share/containers/storage/overlay/ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0/diff" time="2025-01-11T11:31:09-05:00" level=debug msg="layer \"ce5d425d1a334b7a751d21348811c909114800550f7ebdee36d06daf3bf588d0\" size is 699392 bytes, uncompressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6, possibly-compressed digest sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6" time="2025-01-11T11:31:09-05:00" level=debug msg="OCIv1 config = {\"created\":\"2025-01-11T16:31:09.16093935Z\",\"architecture\":\"amd64\",\"os\":\"linux\",\"config\":{\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Entrypoint\":[\"/catatonit\",\"-P\"],\"WorkingDir\":\"/\",\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-11T16:31:09.133319185Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-11T16:31:09.164063409Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-11T11:31:09-05:00" level=debug msg="OCIv1 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.oci.image.manifest.v1+json\",\"config\":{\"mediaType\":\"application/vnd.oci.image.config.v1+json\",\"digest\":\"sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\",\"size\":684},\"layers\":[{\"mediaType\":\"application/vnd.oci.image.layer.v1.tar\",\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\",\"size\":699392}],\"annotations\":{\"org.opencontainers.image.base.digest\":\"\",\"org.opencontainers.image.base.name\":\"\"}}" time="2025-01-11T11:31:09-05:00" level=debug msg="Docker v2s2 config = 
{\"created\":\"2025-01-11T16:31:09.16093935Z\",\"container\":\"108a4b73ea294c92e350e6914ba70ae43bc510d0757a34527239170a2fe2fa22\",\"container_config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"config\":{\"Hostname\":\"\",\"Domainname\":\"\",\"User\":\"\",\"AttachStdin\":false,\"AttachStdout\":false,\"AttachStderr\":false,\"Tty\":false,\"OpenStdin\":false,\"StdinOnce\":false,\"Env\":[\"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin\"],\"Cmd\":null,\"Image\":\"\",\"Volumes\":{},\"WorkingDir\":\"/\",\"Entrypoint\":[\"/catatonit\",\"-P\"],\"OnBuild\":[],\"Labels\":{\"io.buildah.version\":\"1.38.0\"}},\"architecture\":\"amd64\",\"os\":\"linux\",\"rootfs\":{\"type\":\"layers\",\"diff_ids\":[\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"]},\"history\":[{\"created\":\"2025-01-11T16:31:09.133319185Z\",\"created_by\":\"/bin/sh -c #(nop) COPY file:872045bc026632654869cebc14f91fe531969760bf26925e889d9b70527c8b67 in /catatonit \",\"empty_layer\":true},{\"created\":\"2025-01-11T16:31:09.164063409Z\",\"created_by\":\"/bin/sh -c #(nop) ENTRYPOINT [\\\"/catatonit\\\", \\\"-P\\\"]\"}]}" time="2025-01-11T11:31:09-05:00" level=debug msg="Docker v2s2 manifest = {\"schemaVersion\":2,\"mediaType\":\"application/vnd.docker.distribution.manifest.v2+json\",\"config\":{\"mediaType\":\"application/vnd.docker.container.image.v1+json\",\"size\":1347,\"digest\":\"sha256:3d5363d5b8025414d8363b8279ab1974a53f63e86ef685536caca8cc6f1f8c3a\"},\"layers\":[{\"mediaType\":\"application/vnd.docker.image.rootfs.diff.tar\",\"size\":699392,\"digest\":\"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"}]}" time="2025-01-11T11:31:09-05:00" level=debug msg="Using SQLite blob info cache at /home/podman_basic_user/.local/share/containers/cache/blob-info-cache-v1.sqlite" time="2025-01-11T11:31:09-05:00" level=debug msg="IsRunningImageAllowed for image containers-storage:" time="2025-01-11T11:31:09-05:00" level=debug msg=" Using transport \"containers-storage\" policy section \"\"" time="2025-01-11T11:31:09-05:00" level=debug msg=" Requirement 0: allowed" time="2025-01-11T11:31:09-05:00" level=debug msg="Overall: allowed" time="2025-01-11T11:31:09-05:00" level=debug msg="start reading config" time="2025-01-11T11:31:09-05:00" level=debug msg="finished reading config" time="2025-01-11T11:31:09-05:00" level=debug msg="Manifest has MIME type application/vnd.oci.image.manifest.v1+json, ordered candidate list [application/vnd.oci.image.manifest.v1+json, application/vnd.docker.distribution.manifest.v2+json, application/vnd.docker.distribution.manifest.v1+prettyjws, application/vnd.docker.distribution.manifest.v1+json]" time="2025-01-11T11:31:09-05:00" level=debug msg="... 
will first try using the original manifest unmodified" time="2025-01-11T11:31:09-05:00" level=debug msg="Checking if we can reuse blob sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6: general substitution = true, compression for MIME type \"application/vnd.oci.image.layer.v1.tar\" = true" time="2025-01-11T11:31:09-05:00" level=debug msg="reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-11T11:31:09-05:00" level=debug msg="No compression detected" time="2025-01-11T11:31:09-05:00" level=debug msg="Using original blob without modification" time="2025-01-11T11:31:09-05:00" level=debug msg="Applying tar in /home/podman_basic_user/.local/share/containers/storage/overlay/83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6/diff" time="2025-01-11T11:31:09-05:00" level=debug msg="finished reading layer \"sha256:83362ec78cab9ea69b1fd7cce8c7ba3164dab292c6df41c7dbcb6971d43d89e6\"" time="2025-01-11T11:31:09-05:00" level=debug msg="No compression detected" time="2025-01-11T11:31:09-05:00" level=debug msg="Compression change for blob sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49 (\"application/vnd.oci.image.config.v1+json\") not supported" time="2025-01-11T11:31:09-05:00" level=debug msg="Using original blob without modification" time="2025-01-11T11:31:09-05:00" level=debug msg="setting image creation date to 2025-01-11 16:31:09.16093935 +0000 UTC" time="2025-01-11T11:31:09-05:00" level=debug msg="created new image ID \"5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\" with metadata \"{}\"" time="2025-01-11T11:31:09-05:00" level=debug msg="added name \"localhost/podman-pause:5.3.1-1733097600\" to image \"5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]localhost/podman-pause:5.3.1-1733097600\"" time="2025-01-11T11:31:09-05:00" level=debug msg="printing final image id \"5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Pod using bridge network mode" time="2025-01-11T11:31:09-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice for parent user.slice and name libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da" time="2025-01-11T11:31:09-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" time="2025-01-11T11:31:09-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"localhost/podman-pause:5.3.1-1733097600\" ..." 
time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"localhost/podman-pause:5.3.1-1733097600\" as \"localhost/podman-pause:5.3.1-1733097600\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49" time="2025-01-11T11:31:09-05:00" level=debug msg="using systemd mode: false" time="2025-01-11T11:31:09-05:00" level=debug msg="setting container name cec703044d41-infra" time="2025-01-11T11:31:09-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully loaded network podman-default-kube-network: &{podman-default-kube-network fb42127ceae097e95afb4c22c535aaa88e07cb9147b8dd0d6c01dc2c233a4302 bridge podman1 2025-01-11 11:31:08.992158363 -0500 EST [{{{10.89.0.0 ffffff00}} 10.89.0.1 }] [] false false true [] map[] map[] map[driver:host-local]}" time="2025-01-11T11:31:09-05:00" level=debug msg="Successfully loaded 2 networks" time="2025-01-11T11:31:09-05:00" level=debug msg="Allocated lock 1 for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:5e29eea82466556190590f3bfa54c36cf146f7444d2d419150ffcc40f2526d49\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\" has run directory \"/run/user/3001/containers/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Pulling image quay.io/libpod/testimage:20210610 (policy: missing)" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." 
time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Looking up image \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Normalized platform linux/amd64 to {amd64 linux [] }" time="2025-01-11T11:31:09-05:00" level=debug msg="Trying \"quay.io/libpod/testimage:20210610\" ..." time="2025-01-11T11:31:09-05:00" level=debug msg="parsed reference into \"[overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage" time="2025-01-11T11:31:09-05:00" level=debug msg="Found image \"quay.io/libpod/testimage:20210610\" as \"quay.io/libpod/testimage:20210610\" in local containers storage ([overlay@/home/podman_basic_user/.local/share/containers/storage+/run/user/3001/containers]@9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f)" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-11T11:31:09-05:00" level=debug msg="Inspecting image 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f" time="2025-01-11T11:31:09-05:00" level=debug msg="using systemd mode: false" time="2025-01-11T11:31:09-05:00" level=debug msg="adding container to pod httpd1" time="2025-01-11T11:31:09-05:00" level=debug msg="setting container name httpd1-httpd1" 
time="2025-01-11T11:31:09-05:00" level=debug msg="Loading seccomp profile from \"/usr/share/containers/seccomp.json\"" time="2025-01-11T11:31:09-05:00" level=info msg="Sysctl net.ipv4.ping_group_range=0 0 ignored in containers.conf, since Network Namespace set to host" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /proc" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /dev" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /dev/pts" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /dev/mqueue" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /sys" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding mount /sys/fs/cgroup" time="2025-01-11T11:31:09-05:00" level=debug msg="Allocated lock 2 for container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de" time="2025-01-11T11:31:09-05:00" level=debug msg="exporting opaque data as blob \"sha256:9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\" has work directory \"/home/podman_basic_user/.local/share/containers/storage/overlay-containers/69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de/userdata\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\" has run directory \"/run/user/3001/containers/overlay-containers/69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de/userdata\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Strongconnecting node 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="Pushed 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e onto stack" time="2025-01-11T11:31:09-05:00" level=debug msg="Finishing node 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e. Popped 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e off stack" time="2025-01-11T11:31:09-05:00" level=debug msg="Strongconnecting node 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de" time="2025-01-11T11:31:09-05:00" level=debug msg="Pushed 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de onto stack" time="2025-01-11T11:31:09-05:00" level=debug msg="Finishing node 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de. 
Popped 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de off stack" time="2025-01-11T11:31:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/VY45JJLRBH5N22CXKVWPXHFS6U,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c471,c842\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Made network namespace at /run/user/3001/netns/netns-1ba1aabb-9ce7-ed09-ced0-c13ed359f037 for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="Mounted container \"6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/merged\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created root filesystem for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e at /home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/merged" time="2025-01-11T11:31:09-05:00" level=debug msg="Creating rootless network namespace at \"/run/user/3001/containers/networks/rootless-netns/rootless-netns\"" time="2025-01-11T11:31:09-05:00" level=debug msg="pasta arguments: --config-net --pid /run/user/3001/containers/networks/rootless-netns/rootless-netns-conn.pid --dns-forward 169.254.1.1 -t none -u none -T none -U none --no-map-gw --quiet --netns /run/user/3001/containers/networks/rootless-netns/rootless-netns --map-guest-addr 169.254.1.2" time="2025-01-11T11:31:09-05:00" level=debug msg="The path of /etc/resolv.conf in the mount ns is \"/etc/resolv.conf\"" [DEBUG netavark::network::validation] Validating network namespace... [DEBUG netavark::commands::setup] Setting up... 
[INFO netavark::firewall] Using nftables firewall driver [DEBUG netavark::network::bridge] Setup network podman-default-kube-network [DEBUG netavark::network::bridge] Container interface name: eth0 with IP addresses [10.89.0.2/24] [DEBUG netavark::network::bridge] Bridge name: podman1 with IP addresses [10.89.0.1/24] [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.ip_forward to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/podman1/rp_filter to 2 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv6/conf/eth0/autoconf to 0 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/arp_notify to 1 [DEBUG netavark::network::core_utils] Setting sysctl value for /proc/sys/net/ipv4/conf/eth0/rp_filter to 2 [INFO netavark::network::netlink] Adding route (dest: 0.0.0.0/0 ,gw: 10.89.0.1, metric 100) [INFO netavark::firewall::nft] Creating container chain nv_fb42127c_10_89_0_0_nm24 [DEBUG netavark::network::core_utils] Setting sysctl value for net.ipv4.conf.podman1.route_localnet to 1 [DEBUG netavark::dns::aardvark] Spawning aardvark server [DEBUG netavark::dns::aardvark] start aardvark-dns: ["systemd-run", "-q", "--scope", "--user", "/usr/libexec/podman/aardvark-dns", "--config", "/run/user/3001/containers/networks/aardvark-dns", "-p", "53", "run"] [DEBUG netavark::commands::setup] { "podman-default-kube-network": StatusBlock { dns_search_domains: Some( [ "dns.podman", ], ), dns_server_ips: Some( [ 10.89.0.1, ], ), interfaces: Some( { "eth0": NetInterface { mac_address: "ba:4e:01:fd:9a:cc", subnets: Some( [ NetAddress { gateway: Some( 10.89.0.1, ), ipnet: 10.89.0.2/24, }, ], ), }, }, ), }, } [DEBUG netavark::commands::setup] Setup complete time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"Starting parent driver\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"opaque=map[builtin.readypipepath:/run/user/3001/libpod/tmp/rootlessport1778471301/.bp-ready.pipe builtin.socketpath:/run/user/3001/libpod/tmp/rootlessport1778471301/.bp.sock]\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"Starting child driver in child netns (\\\"/proc/self/exe\\\" [rootlessport-child])\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"Waiting for initComplete\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=\"initComplete is closed; parent and child established the communication channel\"\ntime=\"2025-01-11T11:31:09-05:00\" level=info msg=\"Exposing ports [{ 80 15001 1 tcp}]\"\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport: time=\"2025-01-11T11:31:09-05:00\" level=info msg=Ready\n" time="2025-01-11T11:31:09-05:00" level=debug msg="rootlessport is ready" time="2025-01-11T11:31:09-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-11T11:31:09-05:00" level=debug msg="Setting Cgroups for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e to user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice:libpod:6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug 
msg="reading hooks from /usr/share/containers/oci/hooks.d" time="2025-01-11T11:31:09-05:00" level=debug msg="Workdir \"/\" resolved to host path \"/home/podman_basic_user/.local/share/containers/storage/overlay/438cf6e5d85031b39f6e2286fb90558bcb520d90dfcaf76539b1082e932b7376/merged\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created OCI spec for container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e at /home/podman_basic_user/.local/share/containers/storage/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata/config.json" time="2025-01-11T11:31:09-05:00" level=debug msg="Created cgroup path user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice for parent user.slice and name libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da" time="2025-01-11T11:31:09-05:00" level=debug msg="Created cgroup user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" time="2025-01-11T11:31:09-05:00" level=debug msg="Got pod cgroup as user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" time="2025-01-11T11:31:09-05:00" level=debug msg="/usr/bin/conmon messages will be logged to syslog" time="2025-01-11T11:31:09-05:00" level=debug msg="running conmon: /usr/bin/conmon" args="[--api-version 1 -c 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e -u 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e -r /usr/bin/crun -b /home/podman_basic_user/.local/share/containers/storage/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata -p /run/user/3001/containers/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata/pidfile -n cec703044d41-infra --exit-dir /run/user/3001/libpod/tmp/exits --persist-dir /run/user/3001/libpod/tmp/persist/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e --full-attach -s -l k8s-file:/home/podman_basic_user/.local/share/containers/storage/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata/ctr.log --log-level debug --syslog --conmon-pidfile /run/user/3001/containers/overlay-containers/6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e/userdata/conmon.pid --exit-command /usr/bin/podman --exit-command-arg --root --exit-command-arg /home/podman_basic_user/.local/share/containers/storage --exit-command-arg --runroot --exit-command-arg /run/user/3001/containers --exit-command-arg --log-level --exit-command-arg debug --exit-command-arg --cgroup-manager --exit-command-arg systemd --exit-command-arg --tmpdir --exit-command-arg /run/user/3001/libpod/tmp --exit-command-arg --network-config-dir --exit-command-arg --exit-command-arg --network-backend --exit-command-arg netavark --exit-command-arg --volumepath --exit-command-arg /home/podman_basic_user/.local/share/containers/storage/volumes --exit-command-arg --db-backend --exit-command-arg sqlite --exit-command-arg --transient-store=false --exit-command-arg --runtime --exit-command-arg crun --exit-command-arg --storage-driver --exit-command-arg overlay --exit-command-arg --events-backend --exit-command-arg file --exit-command-arg --syslog --exit-command-arg container --exit-command-arg cleanup --exit-command-arg --stopped-only --exit-command-arg 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e]" 
time="2025-01-11T11:31:09-05:00" level=info msg="Running conmon under slice user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice and unitName libpod-conmon-6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e.scope" [conmon:d]: failed to write to /proc/self/oom_score_adj: Permission denied time="2025-01-11T11:31:09-05:00" level=debug msg="Received: 24217" time="2025-01-11T11:31:09-05:00" level=info msg="Got Conmon PID as 24215" time="2025-01-11T11:31:09-05:00" level=debug msg="Created container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e in OCI runtime" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding nameserver(s) from network status of '[\"10.89.0.1\"]'" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding search domain(s) from network status of '[\"dns.podman\"]'" time="2025-01-11T11:31:09-05:00" level=debug msg="Starting container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e with command [/catatonit -P]" time="2025-01-11T11:31:09-05:00" level=debug msg="Started container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="overlay: mount_data=lowerdir=/home/podman_basic_user/.local/share/containers/storage/overlay/l/H6MLQJZURTWK5UKPYM3NBCTJTC,upperdir=/home/podman_basic_user/.local/share/containers/storage/overlay/2fa690399eece911fb5509ec500fd7d2ca654be99b65c5d16219b201e4a935b1/diff,workdir=/home/podman_basic_user/.local/share/containers/storage/overlay/2fa690399eece911fb5509ec500fd7d2ca654be99b65c5d16219b201e4a935b1/work,userxattr,context=\"system_u:object_r:container_file_t:s0:c471,c842\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Mounted container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\" at \"/home/podman_basic_user/.local/share/containers/storage/overlay/2fa690399eece911fb5509ec500fd7d2ca654be99b65c5d16219b201e4a935b1/merged\"" time="2025-01-11T11:31:09-05:00" level=debug msg="Created root filesystem for container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de at /home/podman_basic_user/.local/share/containers/storage/overlay/2fa690399eece911fb5509ec500fd7d2ca654be99b65c5d16219b201e4a935b1/merged" time="2025-01-11T11:31:09-05:00" level=debug msg="/proc/sys/crypto/fips_enabled does not contain '1', not adding FIPS mode bind mounts" time="2025-01-11T11:31:09-05:00" level=debug msg="Cleaning up container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de" time="2025-01-11T11:31:09-05:00" level=debug msg="Unmounted container \"69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de\"" starting container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de: cannot get namespace path unless container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e is running: container is stopped Error: failed to start 1 containers time="2025-01-11T11:31:09-05:00" level=debug msg="Shutting down engines" time="2025-01-11T11:31:09-05:00" level=info msg="Received shutdown.Stop(), terminating!" 
PID=24117 time="2025-01-11T11:31:09-05:00" level=debug msg="Adding parallel job to stop container 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e" time="2025-01-11T11:31:09-05:00" level=debug msg="Adding parallel job to stop container 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de" time="2025-01-11T11:31:09-05:00" level=debug msg="Stopping ctr 69655705528cd205d56c08a3a238862e03c6ed0ca137f32e8a037ee66e10b7de (timeout 10)" time="2025-01-11T11:31:09-05:00" level=debug msg="Stopping ctr 6494e2ff6f9e6c9552ee567ccbabb9c0a860226f6795e3fc26107d320571412e (timeout 10)" time="2025-01-11T11:31:09-05:00" level=debug msg="Removing pod cgroup user.slice/user-3001.slice/user@3001.service/user.slice/user-libpod_pod_cec703044d41b698edb70f2702fb0cd75b3c99d6fb620e924512450b4a7be9da.slice" Jan 11 11:31:09 managed-node2 python3.12[24110]: ansible-containers.podman.podman_play PODMAN-PLAY-KUBE rc: 125 Jan 11 11:31:09 managed-node2 sudo[24107]: pam_unix(sudo:session): session closed for user podman_basic_user Jan 11 11:31:10 managed-node2 python3.12[24369]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:10 managed-node2 python3.12[24501]: ansible-file Invoked with path=/etc/containers/storage.conf state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:11 managed-node2 python3.12[24632]: ansible-file Invoked with path=/tmp/lsr_adhv0_3l_podman state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:14 managed-node2 python3.12[24806]: ansible-ansible.legacy.setup Invoked with gather_subset=['all'] gather_timeout=10 filter=[] fact_path=/etc/ansible/facts.d Jan 11 11:31:15 managed-node2 python3.12[24966]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:15 managed-node2 python3.12[25097]: ansible-ansible.legacy.dnf Invoked with name=['python3-pyasn1', 'python3-cryptography', 'python3-dbus'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:31:17 managed-node2 python3.12[25240]: ansible-ansible.legacy.dnf Invoked with name=['certmonger', 'python3-packaging'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] 
enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:31:18 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 11 11:31:18 managed-node2 dbus-broker-launch[636]: Noticed file-system modification, trigger reload. 
░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Noticed file-system modification, trigger reload. ░░ Subject: A configuration directory was written to ░░ Defined-By: dbus-broker ░░ Support: https://groups.google.com/forum/#!forum/bus1-devel ░░ ░░ A write was detected to one of the directories containing D-Bus configuration ░░ files, triggering a configuration reload. ░░ ░░ This functionality exists for backwards compatibility to pick up changes to ░░ D-Bus configuration without an explicit reolad request. Typically when ░░ installing or removing third-party software causes D-Bus configuration files ░░ to be added or removed. ░░ ░░ It is worth noting that this may cause partial configuration to be loaded in ░░ case dispatching this notification races with the writing of the configuration ░░ files. However, a future notification will then cause the configuration to be ░░ reladed again. Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +31: Eavesdropping is deprecated and ignored Jan 11 11:31:18 managed-node2 dbus-broker-launch[23403]: Policy to allow eavesdropping in /usr/share/dbus-1/session.conf +33: Eavesdropping is deprecated and ignored Jan 11 11:31:19 managed-node2 systemd[1]: Reload requested from client PID 25259 ('systemctl') (unit session-5.scope)... Jan 11 11:31:19 managed-node2 systemd[1]: Reloading... Jan 11 11:31:19 managed-node2 systemd-rc-local-generator[25308]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:19 managed-node2 systemd-ssh-generator[25310]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:19 managed-node2 (sd-exec-[25280]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:19 managed-node2 systemd[1]: Reloading finished in 208 ms. Jan 11 11:31:19 managed-node2 systemd[1]: Started run-p25319-i25619.service - [systemd-run] /usr/bin/systemctl start man-db-cache-update. ░░ Subject: A start job for unit run-p25319-i25619.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p25319-i25619.service has finished successfully. ░░ ░░ The job identifier is 1956. Jan 11 11:31:19 managed-node2 systemd[1]: Starting man-db-cache-update.service... ░░ Subject: A start job for unit man-db-cache-update.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has begun execution. ░░ ░░ The job identifier is 2034. Jan 11 11:31:19 managed-node2 systemd[1]: Reload requested from client PID 25324 ('systemctl') (unit session-5.scope)... 
Jan 11 11:31:19 managed-node2 systemd[1]: Reloading... Jan 11 11:31:19 managed-node2 systemd-rc-local-generator[25372]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:19 managed-node2 systemd-ssh-generator[25375]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:19 managed-node2 (sd-exec-[25348]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:19 managed-node2 systemd[1]: Reloading finished in 348 ms. Jan 11 11:31:19 managed-node2 systemd[1]: Queuing reload/restart jobs for marked units… Jan 11 11:31:20 managed-node2 systemd[1]: man-db-cache-update.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit man-db-cache-update.service has successfully entered the 'dead' state. Jan 11 11:31:20 managed-node2 systemd[1]: Finished man-db-cache-update.service. ░░ Subject: A start job for unit man-db-cache-update.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit man-db-cache-update.service has finished successfully. ░░ ░░ The job identifier is 2034. Jan 11 11:31:20 managed-node2 systemd[1]: run-p25319-i25619.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p25319-i25619.service has successfully entered the 'dead' state. Jan 11 11:31:20 managed-node2 python3.12[25519]: ansible-file Invoked with name=/etc/certmonger//pre-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//pre-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:21 managed-node2 python3.12[25650]: ansible-file Invoked with name=/etc/certmonger//post-scripts owner=root group=root mode=0700 state=directory path=/etc/certmonger//post-scripts recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:21 managed-node2 python3.12[25781]: ansible-ansible.legacy.systemd Invoked with name=certmonger state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:31:21 managed-node2 systemd[1]: Reload requested from client PID 25784 ('systemctl') (unit session-5.scope)... Jan 11 11:31:21 managed-node2 systemd[1]: Reloading... Jan 11 11:31:22 managed-node2 systemd-rc-local-generator[25833]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:22 managed-node2 systemd-ssh-generator[25835]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:22 managed-node2 (sd-exec-[25805]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:22 managed-node2 systemd[1]: Reloading finished in 206 ms. Jan 11 11:31:22 managed-node2 systemd[1]: Starting logrotate.service - Rotate log files... 
░░ Subject: A start job for unit logrotate.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has begun execution. ░░ ░░ The job identifier is 2112. Jan 11 11:31:22 managed-node2 systemd[1]: Starting certmonger.service - Certificate monitoring and PKI enrollment... ░░ Subject: A start job for unit certmonger.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has begun execution. ░░ ░░ The job identifier is 2190. Jan 11 11:31:22 managed-node2 (rtmonger)[25843]: certmonger.service: Referenced but unset environment variable evaluates to an empty string: OPTS Jan 11 11:31:22 managed-node2 systemd[1]: Started certmonger.service - Certificate monitoring and PKI enrollment. ░░ Subject: A start job for unit certmonger.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit certmonger.service has finished successfully. ░░ ░░ The job identifier is 2190. Jan 11 11:31:22 managed-node2 systemd[1]: logrotate.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit logrotate.service has successfully entered the 'dead' state. Jan 11 11:31:22 managed-node2 systemd[1]: Finished logrotate.service - Rotate log files. ░░ Subject: A start job for unit logrotate.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit logrotate.service has finished successfully. ░░ ░░ The job identifier is 2112. Jan 11 11:31:22 managed-node2 python3.12[26004]: ansible-fedora.linux_system_roles.certificate_request Invoked with name=quadlet_demo dns=['localhost'] directory=/etc/pki/tls wait=True ca=self-sign __header=# # Ansible managed # # system_role:certificate provider_config_directory=/etc/certmonger provider=certmonger key_usage=['digitalSignature', 'keyEncipherment'] extended_key_usage=['id-kp-serverAuth', 'id-kp-clientAuth'] auto_renew=True ip=None email=None common_name=None country=None state=None locality=None organization=None organizational_unit=None contact_email=None key_size=None owner=None group=None mode=None principal=None run_before=None run_after=None Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 
Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 certmonger[26019]: Certificate in file "/etc/pki/tls/certs/quadlet_demo.crt" issued by CA and saved. 
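At this point certmonger has issued and saved the certificate requested through fedora.linux_system_roles.certificate_request (name=quadlet_demo, dns=['localhost'], directory=/etc/pki/tls, ca=self-sign, auto_renew=True). The repeated "Wrote to /var/lib/certmonger/requests/20250111163123" entries are certmonger persisting the tracking request as it moves through its internal states. A quick manual verification of the result could look like this (paths taken from the log; the openssl calls are only one way to dump the data):

  getcert list                                                                  # tracking requests and their current status
  openssl x509 -in /etc/pki/tls/certs/quadlet_demo.crt -noout -subject -dates -ext subjectAltName
  openssl pkey -in /etc/pki/tls/private/quadlet_demo.key -noout -text | head    # confirm the private key parses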
Jan 11 11:31:23 managed-node2 certmonger[25843]: 2025-01-11 11:31:23 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:23 managed-node2 python3.12[26150]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 11 11:31:24 managed-node2 python3.12[26281]: ansible-slurp Invoked with path=/etc/pki/tls/private/quadlet_demo.key src=/etc/pki/tls/private/quadlet_demo.key Jan 11 11:31:24 managed-node2 python3.12[26412]: ansible-slurp Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt src=/etc/pki/tls/certs/quadlet_demo.crt Jan 11 11:31:25 managed-node2 python3.12[26543]: ansible-ansible.legacy.command Invoked with _raw_params=getcert stop-tracking -f /etc/pki/tls/certs/quadlet_demo.crt _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:25 managed-node2 certmonger[25843]: 2025-01-11 11:31:25 [25843] Wrote to /var/lib/certmonger/requests/20250111163123 Jan 11 11:31:25 managed-node2 python3.12[26675]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:25 managed-node2 python3.12[26806]: ansible-file Invoked with path=/etc/pki/tls/private/quadlet_demo.key state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:26 managed-node2 python3.12[26937]: ansible-file Invoked with path=/etc/pki/tls/certs/quadlet_demo.crt state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:26 managed-node2 python3.12[27068]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:27 managed-node2 python3.12[27199]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:29 managed-node2 python3.12[27461]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:31:30 managed-node2 python3.12[27598]: ansible-getent Invoked with database=passwd key=root fail_key=False service=None split=None Jan 11 11:31:30 managed-node2 python3.12[27730]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:32 managed-node2 python3.12[27863]: ansible-stat Invoked with path=/run/ostree-booted follow=False get_checksum=True get_mime=True 
get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:32 managed-node2 python3.12[27994]: ansible-stat Invoked with path=/sbin/transactional-update follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:33 managed-node2 python3.12[28125]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:31:34 managed-node2 python3.12[28257]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:31:34 managed-node2 python3.12[28390]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:31:35 managed-node2 python3.12[28523]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:31:36 managed-node2 python3.12[28654]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:31:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:39 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:41 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:42 managed-node2 python3.12[29261]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:43 managed-node2 python3.12[29394]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:44 managed-node2 python3.12[29525]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:44 managed-node2 python3.12[29630]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613103.8228424-13931-69007231641131/.source.network dest=/etc/containers/systemd/quadlet-demo.network owner=root group=0 mode=0644 _original_basename=quadlet-demo.network follow=False checksum=e57c08d49aff4bae8daab138d913aeddaa8682a0 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:45 managed-node2 python3.12[29761]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:45 managed-node2 systemd[1]: Reload requested from client PID 29762 ('systemctl') (unit session-5.scope)... Jan 11 11:31:45 managed-node2 systemd[1]: Reloading... Jan 11 11:31:45 managed-node2 systemd-rc-local-generator[29809]: /etc/rc.d/rc.local is not marked executable, skipping. 
Jan 11 11:31:45 managed-node2 systemd-ssh-generator[29811]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:45 managed-node2 (sd-exec-[29784]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:45 managed-node2 systemd[1]: Reloading finished in 211 ms. Jan 11 11:31:46 managed-node2 python3.12[29949]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:46 managed-node2 systemd[1]: Starting quadlet-demo-network.service... ░░ Subject: A start job for unit quadlet-demo-network.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has begun execution. ░░ ░░ The job identifier is 2269. Jan 11 11:31:46 managed-node2 quadlet-demo-network[29953]: systemd-quadlet-demo Jan 11 11:31:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:46 managed-node2 systemd[1]: Finished quadlet-demo-network.service. ░░ Subject: A start job for unit quadlet-demo-network.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-network.service has finished successfully. ░░ ░░ The job identifier is 2269. Jan 11 11:31:47 managed-node2 python3.12[30091]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:48 managed-node2 python3.12[30224]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:48 managed-node2 python3.12[30355]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:31:49 managed-node2 python3.12[30460]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613108.4490383-14098-270882481689807/.source.volume dest=/etc/containers/systemd/quadlet-demo-mysql.volume owner=root group=0 mode=0644 _original_basename=quadlet-demo-mysql.volume follow=False checksum=585f8cbdf0ec73000f9227dcffbef71e9552ea4a backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:49 managed-node2 python3.12[30591]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:31:49 managed-node2 systemd[1]: Reload requested from client PID 30592 ('systemctl') (unit session-5.scope)... Jan 11 11:31:49 managed-node2 systemd[1]: Reloading... 
Jan 11 11:31:49 managed-node2 systemd-rc-local-generator[30640]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:31:49 managed-node2 systemd-ssh-generator[30642]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:31:49 managed-node2 (sd-exec-[30614]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:31:49 managed-node2 systemd[1]: Reloading finished in 203 ms. Jan 11 11:31:50 managed-node2 python3.12[30779]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:31:50 managed-node2 systemd[1]: Starting quadlet-demo-mysql-volume.service... ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has begun execution. ░░ ░░ The job identifier is 2353. Jan 11 11:31:50 managed-node2 podman[30783]: 2025-01-11 11:31:50.493230448 -0500 EST m=+0.036573186 volume create systemd-quadlet-demo-mysql Jan 11 11:31:50 managed-node2 quadlet-demo-mysql-volume[30783]: systemd-quadlet-demo-mysql Jan 11 11:31:50 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:50 managed-node2 systemd[1]: Finished quadlet-demo-mysql-volume.service. ░░ Subject: A start job for unit quadlet-demo-mysql-volume.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql-volume.service has finished successfully. ░░ ░░ The job identifier is 2353. Jan 11 11:31:51 managed-node2 python3.12[30922]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:31:52 managed-node2 python3.12[31055]: ansible-file Invoked with path=/tmp/quadlet_demo state=directory owner=root group=root mode=0777 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:31:53 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:59 managed-node2 podman[31194]: 2025-01-11 11:31:59.148246293 -0500 EST m=+5.917452742 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 11 11:31:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:59 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:31:59 managed-node2 python3.12[31503]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:00 managed-node2 python3.12[31634]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:32:00 managed-node2 python3.12[31739]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo-mysql.container owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736613119.7690783-14333-16171380749252/.source.container _original_basename=.38c34bi8 follow=False checksum=ca62b2ad3cc9afb5b5371ebbf797b9bc4fd7edd4 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:00 managed-node2 python3.12[31870]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:00 managed-node2 systemd[1]: Reload requested from client PID 31871 ('systemctl') (unit session-5.scope)... Jan 11 11:32:00 managed-node2 systemd[1]: Reloading... Jan 11 11:32:01 managed-node2 systemd-ssh-generator[31920]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:32:01 managed-node2 systemd-rc-local-generator[31918]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:32:01 managed-node2 (sd-exec-[31893]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:32:01 managed-node2 systemd[1]: Reloading finished in 199 ms. Jan 11 11:32:01 managed-node2 python3.12[32058]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:32:01 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 2437. 
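The files dropped into /etc/containers/systemd so far (quadlet-demo.network, quadlet-demo-mysql.volume and now quadlet-demo-mysql.container) are Quadlet units: on every `systemctl daemon-reload` the Podman systemd generator translates them into the regular services seen in this log (quadlet-demo-network.service, quadlet-demo-mysql-volume.service, quadlet-demo-mysql.service). The actual file contents are not captured here; the following is only a hypothetical sketch of what such units can look like and how they would be activated by hand, with every value invented for illustration:

  # hypothetical quadlet .network unit (the real test content is not shown in this log)
  cat > /etc/containers/systemd/quadlet-demo.network <<'EOF'
  [Network]
  Subnet=192.168.30.0/24
  Gateway=192.168.30.1
  Label=app=quadlet-demo
  EOF

  # hypothetical quadlet .container unit referencing the network and volume units
  cat > /etc/containers/systemd/quadlet-demo-mysql.container <<'EOF'
  [Container]
  Image=quay.io/linux-system-roles/mysql:5.6
  ContainerName=quadlet-demo-mysql
  Network=quadlet-demo.network
  Volume=quadlet-demo-mysql.volume:/var/lib/mysql
  [Install]
  WantedBy=multi-user.target
  EOF

  systemctl daemon-reload                          # lets the generator (re)build the .service units
  systemctl cat quadlet-demo-mysql.service         # show the generated service, including the podman run command
  systemctl start quadlet-demo-mysql.service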
Jan 11 11:32:01 managed-node2 podman[32062]: 2025-01-11 11:32:01.741714223 -0500 EST m=+0.044057478 container create 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:01 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:01 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:01 managed-node2 kernel: veth0: entered allmulticast mode Jan 11 11:32:01 managed-node2 kernel: veth0: entered promiscuous mode Jan 11 11:32:01 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:01 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7742] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/3) Jan 11 11:32:01 managed-node2 (udev-worker)[32071]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7773] device (veth0): carrier: link connected Jan 11 11:32:01 managed-node2 (udev-worker)[32072]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7807] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/4) Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7811] device (podman2): carrier: link connected Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7961] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7988] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.7997] device (podman2): Activation: starting connection 'podman2' (23b26a8f-10cc-4b00-9b2c-ec9a1e9d7889) Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8002] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8026] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8029] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8032] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2523. Jan 11 11:32:01 managed-node2 podman[32062]: 2025-01-11 11:32:01.724593778 -0500 EST m=+0.026937162 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 11 11:32:01 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2523. Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8426] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8431] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:32:01 managed-node2 NetworkManager[708]: [1736613121.8439] device (podman2): Activation: successful, device activated. Jan 11 11:32:01 managed-node2 systemd[1]: Started run-p32111-i32411.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p32111-i32411.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p32111-i32411.scope has finished successfully. ░░ ░░ The job identifier is 2602. Jan 11 11:32:01 managed-node2 systemd[1]: Started 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer - [systemd-run] /usr/bin/podman healthcheck run 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a. ░░ Subject: A start job for unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has finished successfully. ░░ ░░ The job identifier is 2608. Jan 11 11:32:01 managed-node2 podman[32062]: 2025-01-11 11:32:01.940343178 -0500 EST m=+0.242686505 container init 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:01 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2437. Jan 11 11:32:01 managed-node2 podman[32062]: 2025-01-11 11:32:01.967641712 -0500 EST m=+0.269985061 container start 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:01 managed-node2 quadlet-demo-mysql[32062]: 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a Jan 11 11:32:02 managed-node2 systemd[1]: 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. 
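The failing transient unit 8b903325...-25956b6f3af3e97d.service is the systemd-run wrapper that the timer above uses to execute `podman healthcheck run 8b903325...`. An exit status of 125 generally means the podman command itself failed rather than the health check; here the mysql container dies almost immediately after starting, so the check has nothing to run against, and the entries that follow show quadlet-demo-mysql.service itself failing with status 1 and the container being removed. A sketch of how such a failure would normally be investigated:

  systemctl status quadlet-demo-mysql.service --no-pager     # unit state and the last ExecStart result
  journalctl -u quadlet-demo-mysql.service -e --no-pager     # service log, including podman's own error output
  podman ps -a --filter name=quadlet-demo-mysql              # is the container still present, and in what state?
  podman healthcheck run <container-id>                      # re-run the check by hand against a still-existing container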
Jan 11 11:32:02 managed-node2 systemd[1]: 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:32:02 managed-node2 systemd[1]: 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 systemd[1]: Stopped 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer - [systemd-run] /usr/bin/podman healthcheck run 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a. ░░ Subject: A stop job for unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-25956b6f3af3e97d.timer has finished. ░░ ░░ The job identifier is 2764 and the job result is done. Jan 11 11:32:02 managed-node2 systemd[1]: run-p32111-i32411.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p32111-i32411.scope has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:02 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:32:02 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:32:02 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:02 managed-node2 NetworkManager[708]: [1736613122.0781] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:32:02 managed-node2 podman[32123]: 2025-01-11 11:32:02.141018376 -0500 EST m=+0.176764499 container remove 8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:02 managed-node2 systemd[1]: quadlet-demo-mysql.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo-mysql.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. Jan 11 11:32:02 managed-node2 systemd[1]: quadlet-demo-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:32:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay-4cbc04ea4b763a54c154ff943a2c395535b18c6a70bc5aa97666b471ea7f3d25-merged.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-4cbc04ea4b763a54c154ff943a2c395535b18c6a70bc5aa97666b471ea7f3d25-merged.mount has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-8b903325bf03fc5d81d55a01a08a7d92d63abe0e60618a5d24b0acede937385a-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 systemd[1]: run-netns-netns\x2dd820430f\x2d8d6c\x2d6798\x2d05b8\x2d6d29a3fb9d17.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dd820430f\x2d8d6c\x2d6798\x2d05b8\x2d6d29a3fb9d17.mount has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:02 managed-node2 python3.12[32292]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:04 managed-node2 python3.12[32425]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:04 managed-node2 python3.12[32556]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:32:05 managed-node2 python3.12[32661]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613124.6292498-14522-101146869489364/.source.yml dest=/etc/containers/systemd/envoy-proxy-configmap.yml owner=root group=0 mode=0644 _original_basename=envoy-proxy-configmap.yml follow=False checksum=d681c7d56f912150d041873e880818b22a90c188 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:06 managed-node2 python3.12[32792]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:06 managed-node2 systemd[1]: Reload requested from client PID 32793 ('systemctl') (unit session-5.scope)... Jan 11 11:32:06 managed-node2 systemd[1]: Reloading... Jan 11 11:32:06 managed-node2 systemd-rc-local-generator[32841]: /etc/rc.d/rc.local is not marked executable, skipping. 
Jan 11 11:32:06 managed-node2 systemd-ssh-generator[32843]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:32:06 managed-node2 (sd-exec-[32816]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:32:06 managed-node2 systemd[1]: Reloading finished in 208 ms. Jan 11 11:32:07 managed-node2 python3.12[32981]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:08 managed-node2 python3.12[33114]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:09 managed-node2 python3.12[33245]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:32:09 managed-node2 python3.12[33350]: ansible-ansible.legacy.copy Invoked with dest=/etc/containers/systemd/quadlet-demo.yml owner=root group=0 mode=0644 src=/root/.ansible/tmp/ansible-tmp-1736613129.091449-14710-252865355782244/.source.yml _original_basename=.42hwdbx1 follow=False checksum=998dccde0483b1654327a46ddd89cbaa47650370 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:10 managed-node2 python3.12[33481]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:10 managed-node2 systemd[1]: Reload requested from client PID 33482 ('systemctl') (unit session-5.scope)... Jan 11 11:32:10 managed-node2 systemd[1]: Reloading... Jan 11 11:32:10 managed-node2 systemd-rc-local-generator[33523]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:32:10 managed-node2 systemd-ssh-generator[33525]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:32:10 managed-node2 (sd-exec-[33505]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:32:10 managed-node2 systemd[1]: Reloading finished in 210 ms. Jan 11 11:32:11 managed-node2 python3.12[33670]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:32:12 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. 
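The two YAML files copied into /etc/containers/systemd above (envoy-proxy-configmap.yml and quadlet-demo.yml) are not Quadlet units themselves but Kubernetes manifests, which is why the daemon-reloads that follow them do not produce any new services; they are presumably referenced by the quadlet-demo.kube unit installed later in this log. Their contents are not shown here, but the object kinds they define could be listed directly on the host, e.g.:

  grep -n 'kind:' /etc/containers/systemd/quadlet-demo.yml /etc/containers/systemd/envoy-proxy-configmap.yml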
Jan 11 11:32:12 managed-node2 python3.12[33804]: ansible-slurp Invoked with path=/etc/containers/systemd/quadlet-demo.yml src=/etc/containers/systemd/quadlet-demo.yml Jan 11 11:32:13 managed-node2 python3.12[33935]: ansible-file Invoked with path=/tmp/httpd3 state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:13 managed-node2 python3.12[34066]: ansible-file Invoked with path=/tmp/httpd3-create state=directory owner=root group=root recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:14 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:26 managed-node2 podman[34204]: 2025-01-11 11:32:26.627835042 -0500 EST m=+12.566683686 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 11 11:32:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:30 managed-node2 podman[34624]: 2025-01-11 11:32:30.932431571 -0500 EST m=+3.782995336 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 11 11:32:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:30 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
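The image pull events above (quay.io/linux-system-roles/wordpress:4.8-apache and quay.io/linux-system-roles/envoyproxy:v1.25.0, following the mysql:5.6 pull earlier) show the role pre-pulling everything the Kubernetes manifest needs, so the later kube unit start can work entirely from local storage. The equivalent manual steps would be roughly:

  podman pull quay.io/linux-system-roles/wordpress:4.8-apache
  podman pull quay.io/linux-system-roles/envoyproxy:v1.25.0
  podman images --filter reference='quay.io/linux-system-roles/*'   # confirm all three images are now local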
Jan 11 11:32:31 managed-node2 python3.12[34886]: ansible-file Invoked with path=/etc/containers/systemd state=directory owner=root group=0 mode=0755 recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:31 managed-node2 python3.12[35017]: ansible-ansible.legacy.stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_size=False checksum_algorithm=sha1 get_mime=True get_attributes=True Jan 11 11:32:32 managed-node2 python3.12[35122]: ansible-ansible.legacy.copy Invoked with src=/root/.ansible/tmp/ansible-tmp-1736613151.519664-15332-169452422398586/.source.kube dest=/etc/containers/systemd/quadlet-demo.kube owner=root group=0 mode=0644 _original_basename=quadlet-demo.kube follow=False checksum=7a5c73a5d935a42431c87bcdbeb8a04ed0909dc7 backup=False force=True unsafe_writes=False content=NOT_LOGGING_PARAMETER validate=None directory_mode=None remote_src=None local_follow=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:32 managed-node2 python3.12[35253]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:32:32 managed-node2 systemd[1]: Reload requested from client PID 35254 ('systemctl') (unit session-5.scope)... Jan 11 11:32:32 managed-node2 systemd[1]: Reloading... Jan 11 11:32:32 managed-node2 systemd-rc-local-generator[35298]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:32:32 managed-node2 systemd-ssh-generator[35303]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:32:32 managed-node2 (sd-exec-[35276]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:32:32 managed-node2 systemd[1]: Reloading finished in 197 ms. Jan 11 11:32:33 managed-node2 python3.12[35441]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=started daemon_reload=False daemon_reexec=False no_block=False enabled=None force=None masked=None Jan 11 11:32:33 managed-node2 systemd[1]: Starting quadlet-demo-mysql.service... ░░ Subject: A start job for unit quadlet-demo-mysql.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has begun execution. ░░ ░░ The job identifier is 2851. 
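Note that asking systemd to start quadlet-demo.service first queues a (re)start of quadlet-demo-mysql.service (job 2851) before the kube job itself (job 2766): the generated quadlet-demo.service evidently carries ordering and requirement dependencies on the mysql container unit, presumably declared in the quadlet-demo.kube file. The dependency chain of the generated unit can be inspected with:

  systemctl cat quadlet-demo.service --no-pager                 # generated unit text, including Requires=/After= lines
  systemctl list-dependencies quadlet-demo.service --no-pager   # tree of units pulled in by a start job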
Jan 11 11:32:33 managed-node2 podman[35445]: 2025-01-11 11:32:33.51834238 -0500 EST m=+0.044555591 container create f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:33 managed-node2 kernel: veth0: entered allmulticast mode Jan 11 11:32:33 managed-node2 kernel: veth0: entered promiscuous mode Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5469] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/5) Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5515] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/6) Jan 11 11:32:33 managed-node2 (udev-worker)[35456]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5567] device (veth0): carrier: link connected Jan 11 11:32:33 managed-node2 (udev-worker)[35461]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5571] device (podman2): carrier: link connected Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5741] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5796] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5803] device (podman2): Activation: starting connection 'podman2' (27c70745-7559-4d0b-b7a4-cbb0ea3d03f9) Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5817] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5819] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5821] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.5825] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 podman[35445]: 2025-01-11 11:32:33.500579736 -0500 EST m=+0.026793087 image pull dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 11 11:32:33 managed-node2 systemd[1]: Starting NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service... ░░ Subject: A start job for unit NetworkManager-dispatcher.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has begun execution. ░░ ░░ The job identifier is 2853. Jan 11 11:32:33 managed-node2 systemd[1]: Started NetworkManager-dispatcher.service - Network Manager Script Dispatcher Service. 
░░ Subject: A start job for unit NetworkManager-dispatcher.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit NetworkManager-dispatcher.service has finished successfully. ░░ ░░ The job identifier is 2853. Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.6225] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.6231] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.6241] device (podman2): Activation: successful, device activated. Jan 11 11:32:33 managed-node2 systemd[1]: Started run-p35488-i35788.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p35488-i35788.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35488-i35788.scope has finished successfully. ░░ ░░ The job identifier is 2932. Jan 11 11:32:33 managed-node2 systemd[1]: Started f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer - [systemd-run] /usr/bin/podman healthcheck run f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c. ░░ Subject: A start job for unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has finished successfully. ░░ ░░ The job identifier is 2938. Jan 11 11:32:33 managed-node2 podman[35445]: 2025-01-11 11:32:33.694651745 -0500 EST m=+0.220865194 container init f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:33 managed-node2 systemd[1]: Started quadlet-demo-mysql.service. ░░ Subject: A start job for unit quadlet-demo-mysql.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo-mysql.service has finished successfully. ░░ ░░ The job identifier is 2851. Jan 11 11:32:33 managed-node2 systemd[1]: Starting quadlet-demo.service... ░░ Subject: A start job for unit quadlet-demo.service has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has begun execution. ░░ ░░ The job identifier is 2766. 
Jan 11 11:32:33 managed-node2 podman[35445]: 2025-01-11 11:32:33.722297794 -0500 EST m=+0.248511112 container start f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:33 managed-node2 quadlet-demo-mysql[35445]: f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c Jan 11 11:32:33 managed-node2 systemd[1]: f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Jan 11 11:32:33 managed-node2 systemd[1]: f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:32:33 managed-node2 quadlet-demo[35507]: Pods stopped: Jan 11 11:32:33 managed-node2 quadlet-demo[35507]: Pods removed: Jan 11 11:32:33 managed-node2 quadlet-demo[35507]: Secrets removed: Jan 11 11:32:33 managed-node2 quadlet-demo[35507]: Volumes removed: Jan 11 11:32:33 managed-node2 systemd[1]: f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has successfully entered the 'dead' state. Jan 11 11:32:33 managed-node2 systemd[1]: Stopped f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer - [systemd-run] /usr/bin/podman healthcheck run f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c. ░░ Subject: A stop job for unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-4ddffa08d16864da.timer has finished. ░░ ░░ The job identifier is 3094 and the job result is done. Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:33 managed-node2 systemd[1]: run-p35488-i35788.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35488-i35788.scope has successfully entered the 'dead' state. 
Jan 11 11:32:33 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:32:33 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:32:33 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.822809762 -0500 EST m=+0.087086499 volume create wp-pv-claim Jan 11 11:32:33 managed-node2 NetworkManager[708]: [1736613153.8407] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.863234275 -0500 EST m=+0.127510628 container create 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.8704762 -0500 EST m=+0.134752517 volume create envoy-certificates Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.875665706 -0500 EST m=+0.139941907 volume create envoy-proxy-config Jan 11 11:32:33 managed-node2 systemd[1]: run-netns-netns\x2dd1335f28\x2d90ee\x2d35c9\x2dbe00\x2d6395b68083bb.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2dd1335f28\x2d90ee\x2d35c9\x2dbe00\x2d6395b68083bb.mount has successfully entered the 'dead' state. Jan 11 11:32:33 managed-node2 systemd[1]: Created slice machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice - cgroup machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice. ░░ Subject: A start job for unit machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice has finished successfully. ░░ ░░ The job identifier is 3096. Jan 11 11:32:33 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:32:33 managed-node2 podman[35506]: 2025-01-11 11:32:33.924485995 -0500 EST m=+0.203788737 container remove f23703c6c325c57487029a3a766c75828129e02cb26c3f7eccd36ba2a0a9b26c (image=quay.io/linux-system-roles/mysql:5.6, name=quadlet-demo-mysql, PODMAN_SYSTEMD_UNIT=quadlet-demo-mysql.service) Jan 11 11:32:33 managed-node2 systemd[1]: quadlet-demo-mysql.service: Main process exited, code=exited, status=1/FAILURE ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo-mysql.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 1. 
Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.952835432 -0500 EST m=+0.217111792 container create dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.959026784 -0500 EST m=+0.223303095 pod create 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 (image=, name=quadlet-demo) Jan 11 11:32:33 managed-node2 systemd[1]: quadlet-demo-mysql.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql.service has entered the 'failed' state with result 'exit-code'. Jan 11 11:32:33 managed-node2 podman[35507]: 2025-01-11 11:32:33.971926754 -0500 EST m=+0.236203312 image pull fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.001880823 -0500 EST m=+0.266157145 container create 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.025570871 -0500 EST m=+0.289847097 container create bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.025879964 -0500 EST m=+0.290156194 container restart 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 systemd[1]: Started libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope - libcrun container. ░░ Subject: A start job for unit libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope has finished successfully. ░░ ░░ The job identifier is 3102. 
Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.005676748 -0500 EST m=+0.269953067 image pull 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.089484908 -0500 EST m=+0.353761227 container init 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.092062096 -0500 EST m=+0.356338482 container start 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 systemd[1]: libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea.scope has successfully entered the 'dead' state. Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1059] manager: (podman2): new Bridge device (/org/freedesktop/NetworkManager/Devices/7) Jan 11 11:32:34 managed-node2 (udev-worker)[35480]: Network interface NamePolicy= disabled on kernel command line. Jan 11 11:32:34 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:34 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:34 managed-node2 kernel: veth0: entered allmulticast mode Jan 11 11:32:34 managed-node2 kernel: veth0: entered promiscuous mode Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1209] manager: (veth0): new Veth device (/org/freedesktop/NetworkManager/Devices/8) Jan 11 11:32:34 managed-node2 kernel: podman2: port 1(veth0) entered blocking state Jan 11 11:32:34 managed-node2 kernel: podman2: port 1(veth0) entered forwarding state Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1260] device (veth0): carrier: link connected Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1264] device (podman2): carrier: link connected Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1357] device (podman2): state change: unmanaged -> unavailable (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1372] device (podman2): state change: unavailable -> disconnected (reason 'connection-assumed', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1389] device (podman2): Activation: starting connection 'podman2' (134e0605-059a-48c0-82b6-94816cad0d2e) Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1391] device (podman2): state change: disconnected -> prepare (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1398] device (podman2): state change: prepare -> config (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1400] device (podman2): state change: config -> ip-config (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1439] device (podman2): state change: ip-config -> ip-check (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: 
[1736613154.1521] device (podman2): state change: ip-check -> secondaries (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1524] device (podman2): state change: secondaries -> activated (reason 'none', managed-type: 'external') Jan 11 11:32:34 managed-node2 NetworkManager[708]: [1736613154.1533] device (podman2): Activation: successful, device activated. Jan 11 11:32:34 managed-node2 podman[35557]: 2025-01-11 11:32:34.183445487 -0500 EST m=+0.072742073 container died 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 systemd[1]: Started run-p35583-i35883.scope - [systemd-run] /usr/libexec/podman/aardvark-dns --config /run/containers/networks/aardvark-dns -p 53 run. ░░ Subject: A start job for unit run-p35583-i35883.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit run-p35583-i35883.scope has finished successfully. ░░ ░░ The job identifier is 3108. Jan 11 11:32:34 managed-node2 systemd[1]: Started libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope - libcrun container. ░░ Subject: A start job for unit libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope has finished successfully. ░░ ░░ The job identifier is 3114. Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.432622644 -0500 EST m=+0.696899007 container init dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.435661679 -0500 EST m=+0.699937978 container start dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 11 11:32:34 managed-node2 systemd[1]: libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit libpod-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971.scope has successfully entered the 'dead' state. Jan 11 11:32:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay-f696c5390a0b653138a6cbb7e05d386e6bad58005fe7e13a67ebd2d4d3cb9772-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-f696c5390a0b653138a6cbb7e05d386e6bad58005fe7e13a67ebd2d4d3cb9772-merged.mount has successfully entered the 'dead' state. 
Jan 11 11:32:34 managed-node2 podman[35507]: 2025-01-11 11:32:34.975381632 -0500 EST m=+1.239657886 container died dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 11 11:32:37 managed-node2 podman[35557]: 2025-01-11 11:32:37.235697756 -0500 EST m=+3.124994479 container cleanup 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:37 managed-node2 systemd[1]: run-p35583-i35883.scope: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-p35583-i35883.scope has successfully entered the 'dead' state. Jan 11 11:32:37 managed-node2 kernel: veth0 (unregistering): left allmulticast mode Jan 11 11:32:37 managed-node2 kernel: veth0 (unregistering): left promiscuous mode Jan 11 11:32:37 managed-node2 kernel: podman2: port 1(veth0) entered disabled state Jan 11 11:32:37 managed-node2 NetworkManager[708]: [1736613157.2649] device (podman2): state change: activated -> unmanaged (reason 'unmanaged', managed-type: 'removed') Jan 11 11:32:37 managed-node2 systemd[1]: run-netns-netns\x2d680699bb\x2daab9\x2d0c5f\x2dd1a3\x2db50c1af0808c.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit run-netns-netns\x2d680699bb\x2daab9\x2d0c5f\x2dd1a3\x2db50c1af0808c.mount has successfully entered the 'dead' state. Jan 11 11:32:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay\x2dcontainers-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971-userdata-shm.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay\x2dcontainers-dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971-userdata-shm.mount has successfully entered the 'dead' state. Jan 11 11:32:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-0f36b4c12f263560636bfed166bcdfec6d2a4d903ee8b4175f38e620256ba1cd-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-0f36b4c12f263560636bfed166bcdfec6d2a4d903ee8b4175f38e620256ba1cd-merged.mount has successfully entered the 'dead' state. Jan 11 11:32:37 managed-node2 podman[35592]: 2025-01-11 11:32:37.3318675 -0500 EST m=+2.694111916 container cleanup dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service, io.buildah.version=1.38.0) Jan 11 11:32:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay-8e11689c21ad0fadb74f350e962e8358d7ff4a808f195ba3097c1ccf92a7cabb-merged.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay-8e11689c21ad0fadb74f350e962e8358d7ff4a808f195ba3097c1ccf92a7cabb-merged.mount has successfully entered the 'dead' state. 
Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: [starting container 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped] Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: [starting container 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped starting container bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped] Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: Volumes: Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: wp-pv-claim Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: Pod: Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: Containers: Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7 Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9 Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: starting container 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: starting container bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9: cannot get namespace path unless container dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 is running: container is stopped Jan 11 11:32:37 managed-node2 quadlet-demo[35507]: Error: failed to start 2 containers Jan 11 11:32:37 managed-node2 systemd[1]: quadlet-demo.service: Failed to parse MAINPID=0 field in notification message, ignoring: Numerical result out of range Jan 11 11:32:37 managed-node2 systemd[1]: Started quadlet-demo.service. ░░ Subject: A start job for unit quadlet-demo.service has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit quadlet-demo.service has finished successfully. ░░ ░░ The job identifier is 2766. Jan 11 11:32:37 managed-node2 systemd[1]: quadlet-demo.service: Main process exited, code=exited, status=125/n/a ░░ Subject: Unit process exited ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ An ExecStart= process belonging to unit quadlet-demo.service has exited. ░░ ░░ The process' exit code is 'exited' and its exit status is 125. Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.438028591 -0500 EST m=+0.037306380 pod stop 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 (image=, name=quadlet-demo) Jan 11 11:32:37 managed-node2 systemd[1]: Removed slice machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice - cgroup machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice. 
░░ Subject: A stop job for unit machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit machine-libpod_pod_5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2.slice has finished. ░░ ░░ The job identifier is 3121 and the job result is done. Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: time="2025-01-11T11:32:37-05:00" level=error msg="Checking whether service of container 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea can be stopped: getting exit code of container 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7 from DB: no such exit code" Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.487178676 -0500 EST m=+0.086456103 container remove 135b4b266a95963c6caf7f3e042facd632a052647467303c5d11ea2b558461f7 (image=quay.io/linux-system-roles/wordpress:4.8-apache, name=quadlet-demo-wordpress, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.502328852 -0500 EST m=+0.101606262 container remove bbe25101f61727ab297c3c843614773622538172fb63fed731b3fedbff2fc3d9 (image=quay.io/linux-system-roles/envoyproxy:v1.25.0, name=quadlet-demo-envoy, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.523333084 -0500 EST m=+0.122610503 container remove dac097e402f18a5b8a08f0e0a6f0b4d9046bf60c689e231cf8aaf1edbe852971 (image=localhost/podman-pause:5.3.1-1733097600, name=5525b1d56f27-infra, pod_id=5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2, io.buildah.version=1.38.0, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.531503391 -0500 EST m=+0.130780769 pod remove 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 (image=, name=quadlet-demo) Jan 11 11:32:37 managed-node2 podman[35618]: 2025-01-11 11:32:37.551667661 -0500 EST m=+0.150945073 container remove 15ea6182725910fac1c33a76e5ea5c12fd79042b5f8ae0a2827c2387513dc8ea (image=localhost/podman-pause:5.3.1-1733097600, name=a96f3a51b8d1-service, PODMAN_SYSTEMD_UNIT=quadlet-demo.service) Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: Pods stopped: Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: Pods removed: Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: 5525b1d56f27cbcd2789982c0bf410f85a1845cc5b97666da3ba24404f995bd2 Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: Secrets removed: Jan 11 11:32:37 managed-node2 quadlet-demo[35618]: Volumes removed: Jan 11 11:32:37 managed-node2 systemd[1]: quadlet-demo.service: Failed with result 'exit-code'. ░░ Subject: Unit failed ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo.service has entered the 'failed' state with result 'exit-code'. 
Jan 11 11:32:37 managed-node2 python3.12[35760]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/containers/systemd _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:38 managed-node2 python3.12[35892]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:38 managed-node2 python3.12[36030]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:38 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:32:39 managed-node2 python3.12[36169]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:39 managed-node2 systemd[4339]: Created slice background.slice - User Background Tasks Slice. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 14. Jan 11 11:32:39 managed-node2 systemd[4339]: Starting systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 13. Jan 11 11:32:39 managed-node2 systemd[4339]: Finished systemd-tmpfiles-clean.service - Cleanup of User's Temporary Files and Directories. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 13. 
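The debugging commands invoked above can be expressed as one looped command task; this is only a sketch (the commands themselves are copied from the log, the loop and changed_when are assumptions):
- name: Show quadlet files and podman state for debugging   # illustrative name
  ansible.builtin.command: "{{ item }}"
  loop:
    - ls -alrtF /etc/containers/systemd
    - podman ps -a
    - podman volume ls
    - podman pod ps --ctr-ids --ctr-names --ctr-status
  changed_when: false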
Jan 11 11:32:39 managed-node2 python3.12[36310]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:32:40 managed-node2 python3.12[36444]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:45 managed-node2 python3.12[36575]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:47 managed-node2 systemd[1]: NetworkManager-dispatcher.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit NetworkManager-dispatcher.service has successfully entered the 'dead' state. Jan 11 11:32:51 managed-node2 python3.12[36707]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:32:56 managed-node2 python3.12[36838]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:01 managed-node2 python3.12[36969]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:07 managed-node2 python3.12[37100]: ansible-get_url Invoked with url=https://localhost:8000 
dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:12 managed-node2 python3.12[37231]: ansible-get_url Invoked with url=https://localhost:8000 dest=/run/out mode=0600 validate_certs=False force=False http_agent=ansible-httpget use_proxy=True force_basic_auth=False use_gssapi=False backup=False checksum= timeout=10 unredirected_headers=[] decompress=True use_netrc=True unsafe_writes=False url_username=None url_password=NOT_LOGGING_PARAMETER client_cert=None client_key=None headers=None tmp_dest=None ciphers=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:13 managed-node2 python3.12[37362]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:13 managed-node2 python3.12[37494]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps -a _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:13 managed-node2 systemd[22929]: Starting grub-boot-success.service - Mark boot as successful... ░░ Subject: A start job for unit UNIT has begun execution ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has begun execution. ░░ ░░ The job identifier is 65. Jan 11 11:33:13 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:13 managed-node2 systemd[22929]: Finished grub-boot-success.service - Mark boot as successful. ░░ Subject: A start job for unit UNIT has finished successfully ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A start job for unit UNIT has finished successfully. ░░ ░░ The job identifier is 65. Jan 11 11:33:14 managed-node2 python3.12[37634]: ansible-ansible.legacy.command Invoked with _raw_params=podman pod ps --ctr-ids --ctr-names --ctr-status _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:14 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
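The repeated ansible-get_url invocations poll https://localhost:8000 with the parameters shown above; sketched as a retried task (the retries/delay/until settings and the registered variable name are assumptions, the module parameters are from the log):
- name: Probe the deployed application         # illustrative name
  ansible.builtin.get_url:
    url: https://localhost:8000
    dest: /run/out
    mode: "0600"
    validate_certs: false
    timeout: 10
  register: __web_probe                        # hypothetical variable name
  retries: 6                                   # assumed; the log only shows repeated attempts
  delay: 5
  until: __web_probe is succeeded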
Jan 11 11:33:14 managed-node2 python3.12[37773]: ansible-ansible.legacy.command Invoked with _raw_params=set -euo pipefail; systemctl list-units --all | grep quadlet _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:14 managed-node2 python3.12[37907]: ansible-ansible.legacy.command Invoked with _raw_params=ls -alrtF /etc/systemd/system _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:16 managed-node2 python3.12[38170]: ansible-ansible.legacy.command Invoked with _raw_params=podman --version _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:17 managed-node2 python3.12[38307]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:19 managed-node2 python3.12[38440]: ansible-ansible.legacy.dnf Invoked with name=['firewalld'] state=present allow_downgrade=False allowerasing=False autoremove=False bugfix=False cacheonly=False disable_gpg_check=False disable_plugin=[] disablerepo=[] download_only=False enable_plugin=[] enablerepo=[] exclude=[] installroot=/ install_repoquery=True install_weak_deps=True security=False skip_broken=False update_cache=False update_only=False validate_certs=True sslverify=True lock_timeout=30 use_backend=auto best=None conf_file=None disable_excludes=None download_dir=None list=None nobest=None releasever=None Jan 11 11:33:20 managed-node2 python3.12[38572]: ansible-systemd Invoked with name=firewalld masked=False daemon_reload=False daemon_reexec=False scope=system no_block=False state=None enabled=None force=None Jan 11 11:33:21 managed-node2 python3.12[38705]: ansible-ansible.legacy.systemd Invoked with name=firewalld state=started enabled=True daemon_reload=False daemon_reexec=False scope=system no_block=False force=None masked=None Jan 11 11:33:21 managed-node2 python3.12[38838]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['8000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:33:22 managed-node2 python3.12[38969]: ansible-fedora.linux_system_roles.firewall_lib Invoked with port=['9000/tcp'] permanent=True runtime=True state=enabled __report_changed=True service=[] source_port=[] forward_port=[] rich_rule=[] source=[] interface=[] interface_pci_id=[] icmp_block=[] timeout=0 ipset_entries=[] protocol=[] helper_module=[] destination=[] firewalld_conf=None masquerade=None icmp_block_inversion=None target=None zone=None set_default_zone=None ipset=None ipset_type=None description=None short=None Jan 11 11:33:24 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
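Opening 8000/tcp and 9000/tcp permanently and at runtime, as logged by firewall_lib above, looks roughly like this when driven through the firewall role (a sketch; the exact variable layout is assumed from the role's public interface):
- name: Open the demo ports                    # illustrative name
  ansible.builtin.include_role:
    name: fedora.linux_system_roles.firewall
  vars:
    firewall:
      - port: 8000/tcp
        state: enabled
        permanent: true
        runtime: true
      - port: 9000/tcp
        state: enabled
        permanent: true
        runtime: true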
Jan 11 11:33:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:29 managed-node2 python3.12[39517]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:30 managed-node2 python3.12[39650]: ansible-systemd Invoked with name=quadlet-demo.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:33:30 managed-node2 systemd[1]: Reload requested from client PID 39653 ('systemctl') (unit session-5.scope)... Jan 11 11:33:30 managed-node2 systemd[1]: Reloading... Jan 11 11:33:30 managed-node2 systemd-ssh-generator[39705]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:33:30 managed-node2 systemd-rc-local-generator[39702]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:33:30 managed-node2 (sd-exec-[39675]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:33:30 managed-node2 systemd[1]: Reloading finished in 207 ms. Jan 11 11:33:31 managed-node2 python3.12[39840]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.kube follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:32 managed-node2 python3.12[40104]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.kube state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:32 managed-node2 python3.12[40235]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:32 managed-node2 systemd[1]: Reload requested from client PID 40236 ('systemctl') (unit session-5.scope)... Jan 11 11:33:32 managed-node2 systemd[1]: Reloading... Jan 11 11:33:32 managed-node2 systemd-rc-local-generator[40283]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:33:32 managed-node2 systemd-ssh-generator[40285]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:33:32 managed-node2 (sd-exec-[40258]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:33:32 managed-node2 systemd[1]: Reloading finished in 198 ms. 
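The teardown logged here stops and disables quadlet-demo.service, removes the .kube file and reloads systemd; a minimal sketch with the parameter values taken from the log (task names are illustrative):
- name: Stop and disable the quadlet service   # illustrative name
  ansible.builtin.systemd:
    name: quadlet-demo.service
    state: stopped
    enabled: false
    force: true

- name: Remove the quadlet unit file
  ansible.builtin.file:
    path: /etc/containers/systemd/quadlet-demo.kube
    state: absent

- name: Reload systemd after removing the unit
  ansible.builtin.systemd:
    daemon_reload: true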
Jan 11 11:33:33 managed-node2 python3.12[40423]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:33 managed-node2 podman[40424]: 2025-01-11 11:33:33.565734545 -0500 EST m=+0.030345195 image untag f41b26d639b72cab2f92dd19cf0a7b368c0addd834cde4dfbe6ba9cc0b5e9755 localhost/podman-pause:5.3.1-1733097600 Jan 11 11:33:33 managed-node2 podman[40424]: 2025-01-11 11:33:33.556310631 -0500 EST m=+0.020921294 image remove f41b26d639b72cab2f92dd19cf0a7b368c0addd834cde4dfbe6ba9cc0b5e9755 Jan 11 11:33:33 managed-node2 podman[40424]: 2025-01-11 11:33:33.584121761 -0500 EST m=+0.048732438 image untag 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f quay.io/libpod/testimage:20210610 Jan 11 11:33:33 managed-node2 podman[40424]: 2025-01-11 11:33:33.565742963 -0500 EST m=+0.030353545 image remove 9f9ec7f2fdef9168f74e9d057f307955db14d782cff22ded51d277d74798cb2f Jan 11 11:33:33 managed-node2 podman[40424]: 2025-01-11 11:33:33.799292052 -0500 EST m=+0.263902793 image untag dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 quay.io/linux-system-roles/mysql:5.6 Jan 11 11:33:33 managed-node2 podman[40424]: 2025-01-11 11:33:33.584132696 -0500 EST m=+0.048743288 image remove dd3b2a5dcb48ff61113592ed5ddd762581be4387c7bc552375a2159422aa6bf5 Jan 11 11:33:34 managed-node2 podman[40424]: 2025-01-11 11:33:34.295337789 -0500 EST m=+0.759948416 image untag fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b quay.io/linux-system-roles/wordpress:4.8-apache Jan 11 11:33:34 managed-node2 podman[40424]: 2025-01-11 11:33:33.799307864 -0500 EST m=+0.263918565 image remove fcf3e41b8864a14d75a6d0627d3d02154e28a153aa57e8baa392cd744ffa0d0b Jan 11 11:33:34 managed-node2 podman[40424]: 2025-01-11 11:33:34.426340521 -0500 EST m=+0.890951174 image untag 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d quay.io/linux-system-roles/envoyproxy:v1.25.0 Jan 11 11:33:34 managed-node2 podman[40424]: 2025-01-11 11:33:34.29536395 -0500 EST m=+0.759974738 image remove 5af2585e22ed1562885d9407efab74010090427be79048c2cd6a226517cc1e1d Jan 11 11:33:34 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:34 managed-node2 python3.12[40562]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
Jan 11 11:33:35 managed-node2 python3.12[40701]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:35 managed-node2 python3.12[40839]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:35 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:36 managed-node2 python3.12[40977]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:36 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:37 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
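The final cleanup and verification pass prunes all images and then checks that no images, volumes, containers or networks remain; sketched as tasks (commands copied from the log, loop structure assumed):
- name: Prune all unused images                # illustrative name
  ansible.builtin.command: podman image prune --all -f
  changed_when: false

- name: Verify nothing is left behind
  ansible.builtin.command: "{{ item }}"
  loop:
    - podman images -n
    - podman volume ls -n
    - podman ps --noheading
    - podman network ls -n -q
  changed_when: false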
Jan 11 11:33:37 managed-node2 python3.12[41392]: ansible-service_facts Invoked Jan 11 11:33:40 managed-node2 python3.12[41631]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:41 managed-node2 python3.12[41764]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:42 managed-node2 python3.12[42028]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:43 managed-node2 python3.12[42159]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:43 managed-node2 systemd[1]: Reload requested from client PID 42160 ('systemctl') (unit session-5.scope)... Jan 11 11:33:43 managed-node2 systemd[1]: Reloading... Jan 11 11:33:43 managed-node2 systemd-rc-local-generator[42208]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:33:43 managed-node2 systemd-ssh-generator[42212]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:33:43 managed-node2 (sd-exec-[42182]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:33:43 managed-node2 systemd[1]: Reloading finished in 195 ms. Jan 11 11:33:44 managed-node2 podman[42348]: 2025-01-11 11:33:44.163928246 -0500 EST m=+0.023484346 volume remove envoy-proxy-config Jan 11 11:33:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:44 managed-node2 podman[42486]: 2025-01-11 11:33:44.575564789 -0500 EST m=+0.023788705 volume remove envoy-certificates Jan 11 11:33:44 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:44 managed-node2 podman[42626]: 2025-01-11 11:33:44.950632546 -0500 EST m=+0.025134124 volume remove wp-pv-claim Jan 11 11:33:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:45 managed-node2 python3.12[42763]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:45 managed-node2 python3.12[42902]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:45 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:46 managed-node2 python3.12[43040]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:46 managed-node2 python3.12[43177]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:46 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:47 managed-node2 python3.12[43315]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:47 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:47 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:48 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
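The sequence logged for /etc/containers/systemd/quadlet-demo.yml (stat, remove the file, daemon-reload, remove the named volumes, `podman image prune --all -f`) is the basic quadlet cleanup pattern. A hedged sketch of that pattern follows; file and volume names are taken from the log, and the role itself may use podman modules rather than raw commands for the volume and prune steps:

# Sketch of the cleanup pattern visible in the journal: drop the quadlet file,
# reload systemd so the generated unit disappears, remove the app's named
# volumes, then prune unused images. Names are copied from the log entries.
- hosts: all
  become: true
  tasks:
    - name: Remove the quadlet Kube YAML
      ansible.builtin.file:
        path: /etc/containers/systemd/quadlet-demo.yml
        state: absent

    - name: Reload systemd so the generated unit goes away
      ansible.builtin.systemd:
        daemon_reload: true

    - name: Remove the volumes the demo app created
      ansible.builtin.command: podman volume rm {{ item }}
      loop:
        - envoy-proxy-config
        - envoy-certificates
        - wp-pv-claim
      register: volrm
      changed_when: volrm.rc == 0
      failed_when: false

    - name: Prune images no longer in use
      ansible.builtin.command: podman image prune --all -f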
Jan 11 11:33:48 managed-node2 python3.12[43730]: ansible-service_facts Invoked Jan 11 11:33:50 managed-node2 python3.12[43969]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:52 managed-node2 python3.12[44102]: ansible-stat Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:33:53 managed-node2 python3.12[44366]: ansible-file Invoked with path=/etc/containers/systemd/envoy-proxy-configmap.yml state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:33:53 managed-node2 python3.12[44497]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:33:53 managed-node2 systemd[1]: Reload requested from client PID 44498 ('systemctl') (unit session-5.scope)... Jan 11 11:33:53 managed-node2 systemd[1]: Reloading... Jan 11 11:33:53 managed-node2 systemd-ssh-generator[44548]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:33:53 managed-node2 systemd-rc-local-generator[44546]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:33:53 managed-node2 (sd-exec-[44520]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:33:53 managed-node2 systemd[1]: Reloading finished in 196 ms. Jan 11 11:33:54 managed-node2 python3.12[44686]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:54 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:55 managed-node2 python3.12[44824]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:55 managed-node2 python3.12[44962]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:55 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
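Each cleanup pass in this log opens with a stat of /usr/bin/getsubids; the role appears to use the presence of that tool to decide how to resolve subuid/subgid ranges for rootless users, falling back to the /etc/subuid files otherwise. A minimal sketch of that probe, with an illustrative fact name:

# Sketch of the getsubids probe seen before each cleanup pass in the journal.
# The fact name __has_getsubids is illustrative, not the role's variable.
- hosts: all
  become: true
  tasks:
    - name: Check if the getsubids tool is available
      ansible.builtin.stat:
        path: /usr/bin/getsubids
      register: getsubids_stat

    - name: Record whether subuid/subgid lookups can use getsubids
      ansible.builtin.set_fact:
        __has_getsubids: "{{ getsubids_stat.stat.exists }}"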
Jan 11 11:33:56 managed-node2 python3.12[45100]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:56 managed-node2 python3.12[45238]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:33:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:56 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:57 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:33:57 managed-node2 python3.12[45655]: ansible-service_facts Invoked Jan 11 11:34:00 managed-node2 python3.12[45894]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:34:01 managed-node2 python3.12[46027]: ansible-systemd Invoked with name=quadlet-demo-mysql.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:34:01 managed-node2 systemd[1]: Reload requested from client PID 46030 ('systemctl') (unit session-5.scope)... Jan 11 11:34:01 managed-node2 systemd[1]: Reloading... Jan 11 11:34:01 managed-node2 systemd-rc-local-generator[46078]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:34:01 managed-node2 systemd-ssh-generator[46080]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:34:01 managed-node2 (sd-exec-[46052]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:34:01 managed-node2 systemd[1]: Reloading finished in 197 ms. 
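Before the quadlet-demo-mysql.container file is deleted in the next entries, the role stops and disables the service that Quadlet generated from it, as the ansible-systemd call at 11:34:01 shows. A sketch of that step, with parameter values mirroring the logged invocation:

# Sketch of the "stop the generated unit before removing its quadlet file"
# step logged at 11:34:01; parameters mirror the ansible-systemd invocation.
- hosts: all
  become: true
  tasks:
    - name: Stop and disable the Quadlet-generated service
      ansible.builtin.systemd:
        name: quadlet-demo-mysql.service
        state: stopped
        enabled: false
        force: true
        scope: system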
Jan 11 11:34:01 managed-node2 python3.12[46218]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:34:02 managed-node2 python3.12[46482]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.container state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:34:03 managed-node2 python3.12[46613]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:34:03 managed-node2 systemd[1]: Reload requested from client PID 46614 ('systemctl') (unit session-5.scope)... Jan 11 11:34:03 managed-node2 systemd[1]: Reloading... Jan 11 11:34:03 managed-node2 systemd-rc-local-generator[46661]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:34:03 managed-node2 systemd-ssh-generator[46663]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:34:03 managed-node2 (sd-exec-[46636]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:34:03 managed-node2 systemd[1]: Reloading finished in 204 ms. Jan 11 11:34:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:04 managed-node2 python3.12[46939]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:04 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:05 managed-node2 python3.12[47077]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:05 managed-node2 python3.12[47215]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:05 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:06 managed-node2 python3.12[47354]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:06 managed-node2 python3.12[47492]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:06 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:07 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:07 managed-node2 python3.12[47907]: ansible-service_facts Invoked Jan 11 11:34:10 managed-node2 python3.12[48146]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:34:11 managed-node2 python3.12[48279]: ansible-systemd Invoked with name=quadlet-demo-mysql-volume.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:34:11 managed-node2 systemd[1]: Reload requested from client PID 48282 ('systemctl') (unit session-5.scope)... Jan 11 11:34:11 managed-node2 systemd[1]: Reloading... Jan 11 11:34:11 managed-node2 systemd-ssh-generator[48326]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:34:11 managed-node2 systemd-rc-local-generator[48324]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:34:11 managed-node2 (sd-exec-[48304]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:34:11 managed-node2 systemd[1]: Reloading finished in 196 ms. Jan 11 11:34:11 managed-node2 systemd[1]: quadlet-demo-mysql-volume.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-mysql-volume.service has successfully entered the 'dead' state. Jan 11 11:34:11 managed-node2 systemd[1]: Stopped quadlet-demo-mysql-volume.service. 
░░ Subject: A stop job for unit quadlet-demo-mysql-volume.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-mysql-volume.service has finished. ░░ ░░ The job identifier is 3123 and the job result is done. Jan 11 11:34:11 managed-node2 python3.12[48471]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:34:12 managed-node2 python3.12[48735]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo-mysql.volume state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:34:13 managed-node2 python3.12[48866]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:34:13 managed-node2 systemd[1]: Reload requested from client PID 48867 ('systemctl') (unit session-5.scope)... Jan 11 11:34:13 managed-node2 systemd[1]: Reloading... Jan 11 11:34:13 managed-node2 systemd-rc-local-generator[48915]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:34:13 managed-node2 (sd-exec-[48889]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:34:13 managed-node2 systemd-ssh-generator[48918]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:34:13 managed-node2 systemd[1]: Reloading finished in 198 ms. Jan 11 11:34:13 managed-node2 podman[49056]: 2025-01-11 11:34:13.998439545 -0500 EST m=+0.027769103 volume remove systemd-quadlet-demo-mysql Jan 11 11:34:14 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:14 managed-node2 python3.12[49194]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:14 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:15 managed-node2 python3.12[49332]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
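The `volume remove` event above is worth noting: a quadlet-demo-mysql.volume unit yields a podman volume named systemd-quadlet-demo-mysql, because Quadlet prefixes the file's basename with systemd- unless VolumeName= overrides it. Cleanup therefore has to target the generated name, roughly as in this sketch:

# Sketch: remove the volume Quadlet created for quadlet-demo-mysql.volume.
# The systemd- prefix matches the "volume remove systemd-quadlet-demo-mysql"
# event in the journal; a missing volume is tolerated rather than failed on.
- hosts: all
  become: true
  tasks:
    - name: Remove the Quadlet-generated named volume
      ansible.builtin.command: podman volume rm systemd-quadlet-demo-mysql
      register: volrm
      changed_when: volrm.rc == 0
      failed_when:
        - volrm.rc != 0
        - "'no such volume' not in volrm.stderr"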
Jan 11 11:34:15 managed-node2 python3.12[49470]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:15 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:15 managed-node2 python3.12[49608]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:16 managed-node2 python3.12[49746]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:16 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:17 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:17 managed-node2 python3.12[50162]: ansible-service_facts Invoked Jan 11 11:34:19 managed-node2 python3.12[50401]: ansible-stat Invoked with path=/usr/bin/getsubids follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:34:21 managed-node2 python3.12[50534]: ansible-systemd Invoked with name=quadlet-demo-network.service scope=system state=stopped enabled=False force=True daemon_reload=False daemon_reexec=False no_block=False masked=None Jan 11 11:34:21 managed-node2 systemd[1]: Reload requested from client PID 50537 ('systemctl') (unit session-5.scope)... Jan 11 11:34:21 managed-node2 systemd[1]: Reloading... Jan 11 11:34:21 managed-node2 systemd-ssh-generator[50586]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:34:21 managed-node2 (sd-exec-[50559]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:34:21 managed-node2 systemd-rc-local-generator[50583]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:34:21 managed-node2 systemd[1]: Reloading finished in 199 ms. 
Jan 11 11:34:21 managed-node2 systemd[1]: quadlet-demo-network.service: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit quadlet-demo-network.service has successfully entered the 'dead' state. Jan 11 11:34:21 managed-node2 systemd[1]: Stopped quadlet-demo-network.service. ░░ Subject: A stop job for unit quadlet-demo-network.service has finished ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ A stop job for unit quadlet-demo-network.service has finished. ░░ ░░ The job identifier is 3124 and the job result is done. Jan 11 11:34:21 managed-node2 python3.12[50726]: ansible-stat Invoked with path=/etc/containers/systemd/quadlet-demo.network follow=False get_checksum=True get_mime=True get_attributes=True checksum_algorithm=sha1 Jan 11 11:34:22 managed-node2 python3.12[50990]: ansible-file Invoked with path=/etc/containers/systemd/quadlet-demo.network state=absent recurse=False force=False follow=True modification_time_format=%Y%m%d%H%M.%S access_time_format=%Y%m%d%H%M.%S unsafe_writes=False _original_basename=None _diff_peek=None src=None modification_time=None access_time=None mode=None owner=None group=None seuser=None serole=None selevel=None setype=None attributes=None Jan 11 11:34:23 managed-node2 python3.12[51121]: ansible-systemd Invoked with daemon_reload=True scope=system daemon_reexec=False no_block=False name=None state=None enabled=None force=None masked=None Jan 11 11:34:23 managed-node2 systemd[1]: Reload requested from client PID 51122 ('systemctl') (unit session-5.scope)... Jan 11 11:34:23 managed-node2 systemd[1]: Reloading... Jan 11 11:34:23 managed-node2 systemd-rc-local-generator[51160]: /etc/rc.d/rc.local is not marked executable, skipping. Jan 11 11:34:23 managed-node2 systemd-ssh-generator[51162]: Failed to query local AF_VSOCK CID: Permission denied Jan 11 11:34:23 managed-node2 (sd-exec-[51144]: /usr/lib/systemd/system-generators/systemd-ssh-generator failed with exit status 1. Jan 11 11:34:23 managed-node2 systemd[1]: Reloading finished in 197 ms. Jan 11 11:34:24 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:24 managed-node2 python3.12[51447]: ansible-ansible.legacy.command Invoked with _raw_params=podman image prune --all -f _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:24 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:25 managed-node2 python3.12[51586]: ansible-ansible.legacy.command Invoked with _raw_params=podman images -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. 
░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:25 managed-node2 python3.12[51724]: ansible-ansible.legacy.command Invoked with _raw_params=podman volume ls -n _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:25 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:25 managed-node2 python3.12[51862]: ansible-ansible.legacy.command Invoked with _raw_params=podman ps --noheading _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:26 managed-node2 python3.12[52000]: ansible-ansible.legacy.command Invoked with _raw_params=podman network ls -n -q _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None Jan 11 11:34:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:26 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. Jan 11 11:34:27 managed-node2 systemd[1]: var-lib-containers-storage-overlay.mount: Deactivated successfully. ░░ Subject: Unit succeeded ░░ Defined-By: systemd ░░ Support: https://access.redhat.com/support ░░ ░░ The unit var-lib-containers-storage-overlay.mount has successfully entered the 'dead' state. 
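Taken as a whole, the journal shows the demo being torn down in reverse order of creation: the Kube app spec first, then the Envoy configmap YAML, the MySQL container, its volume, and finally the network, with a daemon-reload and a resource check after each removal. A compressed sketch of that order, assuming an illustrative variable name (the single handler-driven reload is a simplification of the per-file reloads the role actually performs):

# Sketch of the teardown order visible in the journal; the list variable is
# illustrative and the handler batches the reload that the role does per file.
- hosts: all
  become: true
  vars:
    quadlet_files_in_removal_order:
      - quadlet-demo.yml
      - envoy-proxy-configmap.yml
      - quadlet-demo-mysql.container
      - quadlet-demo-mysql.volume
      - quadlet-demo.network
  tasks:
    - name: Remove each quadlet file in reverse order of creation
      ansible.builtin.file:
        path: "/etc/containers/systemd/{{ item }}"
        state: absent
      loop: "{{ quadlet_files_in_removal_order }}"
      notify: Reload systemd

  handlers:
    - name: Reload systemd
      ansible.builtin.systemd:
        daemon_reload: true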
Jan 11 11:34:27 managed-node2 python3.12[52416]: ansible-service_facts Invoked
Jan 11 11:34:29 managed-node2 python3.12[52655]: ansible-ansible.legacy.command Invoked with _raw_params=exec 1>&2
    set -x
    set -o pipefail
    systemctl list-units --plain -l --all | grep quadlet || :
    systemctl list-unit-files --all | grep quadlet || :
    systemctl list-units --plain --failed -l --all | grep quadlet || :
    _uses_shell=True expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None
Jan 11 11:34:30 managed-node2 python3.12[52793]: ansible-ansible.legacy.command Invoked with _raw_params=journalctl -ex _uses_shell=False expand_argument_vars=True stdin_add_newline=True strip_empty_ends=True argv=None chdir=None executable=None creates=None removes=None stdin=None

PLAY RECAP *********************************************************************
managed-node2              : ok=409  changed=47   unreachable=0    failed=2    skipped=442  rescued=2    ignored=0

TASKS RECAP ********************************************************************
Saturday 11 January 2025 11:34:30 -0500 (0:00:00.418)       0:03:17.416 ******
===============================================================================
Check web -------------------------------------------------------------- 33.15s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:121
fedora.linux_system_roles.podman : Ensure container images are present -- 17.40s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Ensure container images are present --- 6.68s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:18
fedora.linux_system_roles.podman : Start service ------------------------ 4.49s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/create_update_quadlet_spec.yml:110
fedora.linux_system_roles.certificate : Ensure provider packages are installed --- 2.91s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:23
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.36s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 2.04s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.98s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.94s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.93s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.podman : For testing and debugging - services --- 1.93s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:197
fedora.linux_system_roles.certificate : Ensure certificate role dependencies are installed --- 1.88s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:5
Gathering Facts --------------------------------------------------------- 1.36s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/tests/podman/tests_quadlet_demo.yml:9
fedora.linux_system_roles.podman : Prune images no longer in use -------- 1.33s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:120
fedora.linux_system_roles.podman : Remove volumes ----------------------- 1.26s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/cleanup_quadlet_spec.yml:99
fedora.linux_system_roles.certificate : Slurp the contents of the files --- 1.20s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:152
fedora.linux_system_roles.podman : Gather the package facts ------------- 1.18s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/podman/tasks/main.yml:6
fedora.linux_system_roles.certificate : Ensure provider service is running --- 1.18s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:90
fedora.linux_system_roles.firewall : Configure firewall ----------------- 1.16s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/firewall/tasks/main.yml:71
fedora.linux_system_roles.certificate : Remove files -------------------- 1.14s
/tmp/collections-BPh/ansible_collections/fedora/linux_system_roles/roles/certificate/tasks/main.yml:181
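The final debug step the test runs before dumping this journal is the shell probe logged at 11:34:29: list every unit and unit file matching "quadlet", tolerating empty matches, before calling journalctl -ex. A task sketch of that probe (the commands are copied from the logged _raw_params; only the play wrapper and task name are illustrative):

# Sketch of the final debug probe logged at 11:34:29; commands are copied from
# the journal, the surrounding play and task name are invented for this sketch.
- hosts: all
  become: true
  tasks:
    - name: Dump any remaining quadlet units for debugging
      ansible.builtin.shell: |
        exec 1>&2
        set -x
        set -o pipefail
        systemctl list-units --plain -l --all | grep quadlet || :
        systemctl list-unit-files --all | grep quadlet || :
        systemctl list-units --plain --failed -l --all | grep quadlet || :
      changed_when: false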