[WARNING]: Collection infra.leapp does not support Ansible version 2.14.18
[WARNING]: running playbook inside collection infra.leapp
ansible-playbook [core 2.14.18]
  config file = /etc/ansible/ansible.cfg
  configured module search path = ['/root/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']
  ansible python module location = /usr/lib/python3.9/site-packages/ansible
  ansible collection location = /root/.ansible/collections:/usr/share/ansible/collections
  executable location = /usr/bin/ansible-playbook
  python version = 3.9.25 (main, Mar 9 2026, 00:00:00) [GCC 11.5.0 20240719 (Red Hat 11.5.0-14)] (/usr/bin/python3)
  jinja version = 3.1.2
  libyaml = True
Using /etc/ansible/ansible.cfg as config file
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.

PLAYBOOK: tests_default.yml ****************************************************
1 plays in /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/tests_default.yml

PLAY [Test] ********************************************************************

TASK [Gathering Facts] *********************************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/tests_default.yml:2
ok: [managed-node01]

TASK [Test | Run role analysis] ************************************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/tests_default.yml:9

TASK [infra.leapp.analysis : Ensure ansible_facts used by role] ****************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:2
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/get_facts.yml for managed-node01

TASK [infra.leapp.analysis : get_facts | Ensure ansible_facts used by role] ****
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/get_facts.yml:3
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.analysis : Lock timestamped variables] ***********************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:8
ok: [managed-node01] => {"ansible_facts": {"__leapp_timestamp": "2026-03-23_23-25-10"}, "changed": false}

TASK [Initialize lock, logging, and common vars] *******************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:12

TASK [infra.leapp.common : init_leapp_log | Ensure ansible_facts used by role] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:9
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml for managed-node01

TASK [infra.leapp.common : get_facts | Ensure ansible_facts used by role] ******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml:3
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : init_leapp_log | Ensure that log directory exists] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:12
changed: [managed-node01] => {"changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/var/log/leapp", "secontext": "unconfined_u:object_r:var_log_t:s0", "size": 6, "state": "directory", "uid": 0}

TASK [infra.leapp.common : init_leapp_log | Check for existing log file] *******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:20
ok: [managed-node01] => {"changed": false, "stat": {"exists": false}}

TASK [infra.leapp.common : init_leapp_log | Fail if log file already exists] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:25
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : init_leapp_log | Create new log file] ***************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:34
changed: [managed-node01] => {"changed": true, "checksum": "4ed35ba22341e683a2ae3f4efe62102dcca81352", "dest": "/var/log/leapp/ansible_leapp_analysis.log", "gid": 0, "group": "root", "md5sum": "b64b17e3792fe5facfd54634e8ae98f0", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:var_log_t:s0", "size": 70, "src": "/root/.ansible/tmp/ansible-tmp-1774308311.6493018-5737-211204627951958/source", "state": "file", "uid": 0}

TASK [infra.leapp.common : init_leapp_log | /etc/ansible/facts.d directory exists] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:44
changed: [managed-node01] => {"changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/etc/ansible/facts.d", "secontext": "unconfined_u:object_r:etc_t:s0", "size": 6, "state": "directory", "uid": 0}

TASK [infra.leapp.common : init_leapp_log | Capture current ansible_facts for validation after upgrade] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:52
changed: [managed-node01] => (item=/etc/ansible/facts.d/pre_ipu.fact) => {"ansible_loop_var": "item", "changed": true, "checksum": "9383e07c79f618f48c2dbef212f94ea16ab1c9eb", "dest": "/etc/ansible/facts.d/pre_ipu.fact", "gid": 0, "group": "root", "item": "/etc/ansible/facts.d/pre_ipu.fact", "md5sum": "b50a27024ca53622bad2a81092f554cc", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 13831, "src": "/root/.ansible/tmp/ansible-tmp-1774308312.7295775-5765-268461614533562/source", "state": "file", "uid": 0}
changed: [managed-node01] => (item=/var/log/leapp/ansible_leapp_analysis.log) => {"ansible_loop_var": "item", "changed": true, "checksum": "9383e07c79f618f48c2dbef212f94ea16ab1c9eb", "dest": "/var/log/leapp/ansible_leapp_analysis.log", "gid": 0, "group": "root", "item": "/var/log/leapp/ansible_leapp_analysis.log", "md5sum": "b50a27024ca53622bad2a81092f554cc", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:var_log_t:s0", "size": 13831, "src": "/root/.ansible/tmp/ansible-tmp-1774308313.3342285-5765-158015430296471/source", "state": "file", "uid": 0}
"non-zero return code", "rc": 1, "start": "2026-03-23 19:25:14.305364", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []} TASK [infra.leapp.common : init_leapp_log | Create fact with the non-rhel versioned packages list] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:77 ok: [managed-node01] => {"ansible_facts": {"non_rhel_packages": []}, "changed": false} TASK [infra.leapp.common : init_leapp_log | Capture the list of non-rhel versioned packages in a separate fact file] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/init_leapp_log.yml:81 changed: [managed-node01] => (item=/etc/ansible/facts.d/non_rhel_packages.fact) => {"ansible_loop_var": "item", "changed": true, "checksum": "97d170e1550eee4afc0af065b78cda302a97674c", "dest": "/etc/ansible/facts.d/non_rhel_packages.fact", "gid": 0, "group": "root", "item": "/etc/ansible/facts.d/non_rhel_packages.fact", "md5sum": "d751713988987e9331980363e24189ce", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:etc_t:s0", "size": 2, "src": "/root/.ansible/tmp/ansible-tmp-1774308315.254503-5815-153490026859188/source", "state": "file", "uid": 0} changed: [managed-node01] => (item=/var/log/leapp/ansible_leapp_analysis.log) => {"ansible_loop_var": "item", "changed": true, "checksum": "97d170e1550eee4afc0af065b78cda302a97674c", "dest": "/var/log/leapp/ansible_leapp_analysis.log", "gid": 0, "group": "root", "item": "/var/log/leapp/ansible_leapp_analysis.log", "md5sum": "d751713988987e9331980363e24189ce", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:var_log_t:s0", "size": 2, "src": "/root/.ansible/tmp/ansible-tmp-1774308315.8466098-5815-93085866387324/source", "state": "file", "uid": 0} TASK [infra.leapp.analysis : Include tasks for preupg assistant analysis] ****** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:22 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.analysis : Include tasks for leapp preupgrade analysis] ****** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:26 included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml for managed-node01 TASK [analysis-leapp | Include pre_upgrade_update.yml] ************************* task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:4 TASK [infra.leapp.common : pre_upgrade_update | Ensure ansible_facts used by role] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:3 included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml for managed-node01 TASK [infra.leapp.common : get_facts | Ensure ansible_facts used by role] ****** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml:3 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.common : pre_upgrade_update | Register with Satellite activation key] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:6 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [pre_upgrade_update | Include custom_local_repos 
TASK [infra.leapp.analysis : Include tasks for preupg assistant analysis] ******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:22
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.analysis : Include tasks for leapp preupgrade analysis] ******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:26
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml for managed-node01

TASK [analysis-leapp | Include pre_upgrade_update.yml] *************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:4

TASK [infra.leapp.common : pre_upgrade_update | Ensure ansible_facts used by role] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:3
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml for managed-node01

TASK [infra.leapp.common : get_facts | Ensure ansible_facts used by role] ******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml:3
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : pre_upgrade_update | Register with Satellite activation key] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:6
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [pre_upgrade_update | Include custom_local_repos for local_repos_pre_leapp] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:13
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : pre_upgrade_update | Install leapp packages] ********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:21
changed: [managed-node01] => {"changed": true, "msg": "", "rc": 0, "results": ["Installed: python3-leapp-0.20.0-1.el8_10.noarch", "Installed: policycoreutils-python-utils-2.9-26.el8_10.noarch", "Installed: leapp-0.20.0-1.el8_10.noarch", "Installed: leapp-deps-0.20.0-1.el8_10.noarch", "Installed: leapp-upgrade-el8toel9-0.23.0-1.el8_10.noarch", "Installed: leapp-upgrade-el8toel9-deps-0.23.0-1.el8_10.noarch", "Installed: systemd-container-239-82.el8_10.15.x86_64"]}

TASK [infra.leapp.common : pre_upgrade_update | Get package version lock entries] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:33
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : pre_upgrade_update | Remove all package version locks] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:40
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : pre_upgrade_update | Include update-and-reboot.yml] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/pre_upgrade_update.yml:48
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.analysis : analysis-leapp | Ensure leapp log directory exists] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:11
ok: [managed-node01] => {"changed": false, "gid": 0, "group": "root", "mode": "0700", "owner": "root", "path": "/var/log/leapp", "secontext": "system_u:object_r:var_log_t:s0", "size": 40, "state": "directory", "uid": 0}

TASK [infra.leapp.analysis : analysis-leapp | Populate leapp_answers file] *****
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:19
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [analysis-leapp | Create /etc/leapp/files/leapp_upgrade_repositories.repo] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:28
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.analysis : analysis-leapp | Leapp preupgrade report] *********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:39
ASYNC FAILED on managed-node01: jid=j549218552299.7000
changed: [managed-node01] => {"ansible_job_id": "j549218552299.7000", "changed": true, "cmd": "set -o pipefail; export PATH=$PATH; ulimit -n 16384; leapp preupgrade --report-schema=1.2.0 2>&1 | tee -a /var/log/leapp/ansible_leapp_analysis.log\n", "delta": "0:00:37.538707", "end": "2026-03-23 19:26:07.023319", "failed_when_result": false, "finished": 1, "msg": "non-zero return code", "rc": 1, "results_file": "/root/.ansible_async/j549218552299.7000", "start": "2026-03-23 19:25:29.484612", "started": 1, "stderr": "", "stderr_lines": [], "stdout": "==> Processing phase `configuration_phase`\n====> * ipu_workflow_config\n IPU workflow config actor\n==> Processing phase `FactsCollection`\n====> * scanclienablerepo\n Produce CustomTargetRepository based on the LEAPP_ENABLE_REPOS in config.\n====> * udevadm_info\n Produces data exported by the \"udevadm info\" command.\n====> * scan_grub_device_name\n Find the name of the block devices where GRUB is located\n====> * scan_pkg_manager\n Provides data about package manager (yum/dnf)\n====> * scan_custom_repofile\n Scan the custom /etc/leapp/files/leapp_upgrade_repositories.repo repo file.\n====> * scan_custom_modifications_actor\n Collects information about files in leapp directories that have been modified or newly added.\n====> * scandasd\n In case of s390x architecture, check whether DASD is used.\n====> * transaction_workarounds\n Provides additional RPM transaction tasks based on bundled RPM packages.\n====> * scan_sap_hana\n Gathers information related to SAP HANA instances on the system.\n====> * scan_target_os_image\n Scans the provided target OS ISO image to use as a content source for the IPU, if any.\n====> * repository_mapping\n Produces message containing repository mapping based on provided file.\n====> * scan_files_for_target_userspace\n Scan the source system and identify files that will be copied into the target userspace when it is created.\n====> * xorgdrvfacts8to9\n Check the journal logs for deprecated Xorg drivers.\n====> * copy_dnf_conf_into_target_userspace\n Copy dnf.conf into target userspace\n====> * ifcfg_scanner\n Scan ifcfg files with legacy network configuration\n====> * scan_source_files\n Scan files (explicitly specified) of the source system.\n====> * network_manager_read_config\n Provides data about NetworkManager configuration.\n====> * firewalld_collect_used_object_names\n This actor reads firewalld's configuration and produces Model\n====> * scan_kernel_cmdline\n Scan the kernel command line of the booted system.\n====> * register_ruby_irb_adjustment\n Register a workaround to allow rubygem-irb's directory -> symlink conversion.\n====> * sssd_facts_8to9\n Check SSSD configuration for changes in RHEL9 and report them in model.\n====> * rpm_scanner\n Provides data about installed RPM Packages.\n====> * scanzfcp\n In case of s390x architecture, check whether ZFCP is used.\n====> * scanmemory\n Scan Memory of the machine.\n====> * scanblacklistca\n Scan the file system for distrusted CA's in the blacklist directory.\n====> * persistentnetnames\n Get network interface information for physical ethernet interfaces of the original system.\n====> * scan_subscription_manager_info\n Scans the current system for subscription manager information\n====> * persistentnetnamesdisable\n Disable systemd-udevd persistent network naming on machine with single eth0 NIC\n====> * get_enabled_modules\n Provides data about which module streams are enabled on the source system.\n====> * scan_grub_config\n Scan grub configuration files for errors.\n====> * root_scanner\n Scan the system root directory and produce a message containing\n====> * scan_source_boot_entry\n Scan the default boot entry of the source system.\n====> * scan_default_initramfs\n Scan details of the default boot entry's initramfs image.\n====> * firewalld_collect_global_config\n This actor reads firewalld's configuration and produces Model\n====> * system_facts\n Provides data about many facts from system.\n====> * scan_hybrid_image_azure\n Check if the system is using Azure hybrid image.\n====> * scancryptopolicies\n Scan information about system wide set crypto policies including:\n====> * scan_systemd_source\n Provides info about systemd on the source system\n====> * storage_scanner\n Provides data about storage settings.\n====> * check_custom_network_scripts\n Check the existence of custom network-scripts and warn user about possible\n====> * nis_scanner\n Collect information about the NIS packages configuration.\n====> * roce_scanner\n Detect active RoCE NICs on IBM Z machines.\n====> * checkrhui\n Check if system is using RHUI infrastructure (on public cloud) and send messages to\n====> * load_device_driver_deprecation_data\n Loads deprecation data for drivers and devices (PCI & CPU)\n====> * open_ssl_config_scanner\n Read an OpenSSL configuration file for further analysis.\n====> * network_manager_connection_scanner\n Scan NetworkManager connection keyfiles\n====> * read_openssh_config\n Collect information about the OpenSSH configuration.\n====> * get_installed_desktops\n Actor checks if kde or gnome desktop environments\n====> * remove_obsolete_gpg_keys\n Remove obsoleted RPM GPG keys.\n====> * scan_fips\n Determine whether the source system has FIPS enabled.\n====> * selinuxcontentscanner\n Scan the system for any SELinux customizations\n====> * trusted_gpg_keys_scanner\n Scan for trusted GPG keys.\n====> * distribution_signed_rpm_scanner\n Provide data about distribution signed & third-party RPM packages.\n====> * xfs_info_scanner\n This actor scans all mounted mountpoints for XFS information.\n====> * repositories_blacklist\n Exclude target repositories provided by Red Hat without support.\n====> * detect_kernel_drivers\n Matches all currently loaded kernel drivers against known deprecated and removed drivers.\n====> * biosdevname\n Enable biosdevname on the target RHEL system if all interfaces on the source RHEL\n====> * luks_scanner\n Provides data about active LUKS devices.\n====> * scan_source_kernel\n Scan the source system kernel.\n====> * used_repository_scanner\n Scan used enabled repositories\n====> * pci_devices_scanner\n Provides data about existing PCI Devices.\n====> * multipath_conf_read_8to9\n Read multipath configuration files and extract the necessary information\n====> * satellite_upgrade_facts\n Report which Satellite packages require updates and how to handle PostgreSQL data\n====> * ipa_scanner\n Scan system for ipa-client and ipa-server status\n====> * satellite_upgrade_services\n Reconfigure Satellite services\n====> * vdo_conversion_scanner\n Provides conversion info about VDO devices.\n====> * scancpu\n Scan CPUs of the machine.\n====> * rpm_transaction_config_tasks_collector\n Provides additional RPM transaction tasks from /etc/leapp/transaction.\n====> * scan_dynamic_linker_configuration\n Scan the dynamic linker configuration and find modifications.\n====> * pes_events_scanner\n Provides data about package events from Package Evolution Service.\n====> * setuptargetrepos\n Produces list of repositories that should be available to be used during IPU process.\n\n============================================================\n ERRORS \n============================================================\n\n2026-03-23 19:25:47.225099 [ERROR] Actor: scan_subscription_manager_info\nMessage: A subscription-manager command failed to execute\nSummary:\n Details: Command ['subscription-manager', 'release'] failed with exit code 1.\n Stderr: This system is not yet registered. Try 'subscription-manager register --help' for more information.\n Hint: Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\n Link: https://access.redhat.com/solutions/6138372\n\n============================================================\n END OF ERRORS \n============================================================\n\nDebug output written to /var/log/leapp/leapp-preupgrade.log\n\n============================================================\n REPORT OVERVIEW \n============================================================\n\nFollowing errors occurred and the upgrade cannot continue:\n 1. Actor: scan_subscription_manager_info\n Message: A subscription-manager command failed to execute\n\nHIGH and MEDIUM severity reports:\n 1. Packages available in excluded repositories will not be installed\n\nReports summary:\n Errors: 1\n Inhibitors: 0\n HIGH severity reports: 1\n MEDIUM severity reports: 0\n LOW severity reports: 0\n INFO severity reports: 1\n\nBefore continuing, review the full report below for details about discovered problems and possible remediation instructions:\n A report has been generated at /var/log/leapp/leapp-report.txt\n A report has been generated at /var/log/leapp/leapp-report.json\n\n============================================================\n END OF REPORT OVERVIEW \n============================================================\n\nAnswerfile has been generated at /var/log/leapp/answerfile", "stdout_lines": ["==> Processing phase `configuration_phase`", "====> * ipu_workflow_config", " IPU workflow config actor", "==> Processing phase `FactsCollection`", "====> * scanclienablerepo", " Produce CustomTargetRepository based on the LEAPP_ENABLE_REPOS in config.", "====> * udevadm_info", " Produces data exported by the \"udevadm info\" command.", "====> * scan_grub_device_name", " Find the name of the block devices where GRUB is located", "====> * scan_pkg_manager", " Provides data about package manager (yum/dnf)", "====> * scan_custom_repofile", " Scan the custom /etc/leapp/files/leapp_upgrade_repositories.repo repo file.", "====> * scan_custom_modifications_actor", " Collects information about files in leapp directories that have been modified or newly added.", "====> * scandasd", " In case of s390x architecture, check whether DASD is used.", "====> * transaction_workarounds", " Provides additional RPM transaction tasks based on bundled RPM packages.", "====> * scan_sap_hana", " Gathers information related to SAP HANA instances on the system.", "====> * scan_target_os_image", " Scans the provided target OS ISO image to use as a content source for the IPU, if any.", "====> * repository_mapping", " Produces message containing repository mapping based on provided file.", "====> * scan_files_for_target_userspace", " Scan the source system and identify files that will be copied into the target userspace when it is created.", "====> * xorgdrvfacts8to9", " Check the journal logs for deprecated Xorg drivers.", "====> * copy_dnf_conf_into_target_userspace", " Copy dnf.conf into target userspace", "====> * ifcfg_scanner", " Scan ifcfg files with legacy network configuration", "====> * scan_source_files", " Scan files (explicitly specified) of the source system.", "====> * network_manager_read_config", " Provides data about NetworkManager configuration.", "====> * firewalld_collect_used_object_names", " This actor reads firewalld's configuration and produces Model", "====> * scan_kernel_cmdline", " Scan the kernel command line of the booted system.", "====> * register_ruby_irb_adjustment", " Register a workaround to allow rubygem-irb's directory -> symlink conversion.", "====> * sssd_facts_8to9", " Check SSSD configuration for changes in RHEL9 and report them in model.", "====> * rpm_scanner", " Provides data about installed RPM Packages.", "====> * scanzfcp", " In case of s390x architecture, check whether ZFCP is used.", "====> * scanmemory", " Scan Memory of the machine.", "====> * scanblacklistca", " Scan the file system for distrusted CA's in the blacklist directory.", "====> * persistentnetnames", " Get network interface information for physical ethernet interfaces of the original system.", "====> * scan_subscription_manager_info", " Scans the current system for subscription manager information", "====> * persistentnetnamesdisable", " Disable systemd-udevd persistent network naming on machine with single eth0 NIC", "====> * get_enabled_modules", " Provides data about which module streams are enabled on the source system.", "====> * scan_grub_config", " Scan grub configuration files for errors.", "====> * root_scanner", " Scan the system root directory and produce a message containing", "====> * scan_source_boot_entry", " Scan the default boot entry of the source system.", "====> * scan_default_initramfs", " Scan details of the default boot entry's initramfs image.", "====> * firewalld_collect_global_config", " This actor reads firewalld's configuration and produces Model", "====> * system_facts", " Provides data about many facts from system.", "====> * scan_hybrid_image_azure", " Check if the system is using Azure hybrid image.", "====> * scancryptopolicies", " Scan information about system wide set crypto policies including:", "====> * scan_systemd_source", " Provides info about systemd on the source system", "====> * storage_scanner", " Provides data about storage settings.", "====> * check_custom_network_scripts", " Check the existence of custom network-scripts and warn user about possible", "====> * nis_scanner", " Collect information about the NIS packages configuration.", "====> * roce_scanner", " Detect active RoCE NICs on IBM Z machines.", "====> * checkrhui", " Check if system is using RHUI infrastructure (on public cloud) and send messages to", "====> * load_device_driver_deprecation_data", " Loads deprecation data for drivers and devices (PCI & CPU)", "====> * open_ssl_config_scanner", " Read an OpenSSL configuration file for further analysis.", "====> * network_manager_connection_scanner", " Scan NetworkManager connection keyfiles", "====> * read_openssh_config", " Collect information about the OpenSSH configuration.", "====> * get_installed_desktops", " Actor checks if kde or gnome desktop environments", "====> * remove_obsolete_gpg_keys", " Remove obsoleted RPM GPG keys.", "====> * scan_fips", " Determine whether the source system has FIPS enabled.", "====> * selinuxcontentscanner", " Scan the system for any SELinux customizations", "====> * trusted_gpg_keys_scanner", " Scan for trusted GPG keys.", "====> * distribution_signed_rpm_scanner", " Provide data about distribution signed & third-party RPM packages.", "====> * xfs_info_scanner", " This actor scans all mounted mountpoints for XFS information.", "====> * repositories_blacklist", " Exclude target repositories provided by Red Hat without support.", "====> * detect_kernel_drivers", " Matches all currently loaded kernel drivers against known deprecated and removed drivers.", "====> * biosdevname", " Enable biosdevname on the target RHEL system if all interfaces on the source RHEL", "====> * luks_scanner", " Provides data about active LUKS devices.", "====> * scan_source_kernel", " Scan the source system kernel.", "====> * used_repository_scanner", " Scan used enabled repositories", "====> * pci_devices_scanner", " Provides data about existing PCI Devices.", "====> * multipath_conf_read_8to9", " Read multipath configuration files and extract the necessary information", "====> * satellite_upgrade_facts", " Report which Satellite packages require updates and how to handle PostgreSQL data", "====> * ipa_scanner", " Scan system for ipa-client and ipa-server status", "====> * satellite_upgrade_services", " Reconfigure Satellite services", "====> * vdo_conversion_scanner", " Provides conversion info about VDO devices.", "====> * scancpu", " Scan CPUs of the machine.", "====> * rpm_transaction_config_tasks_collector", " Provides additional RPM transaction tasks from /etc/leapp/transaction.", "====> * scan_dynamic_linker_configuration", " Scan the dynamic linker configuration and find modifications.", "====> * pes_events_scanner", " Provides data about package events from Package Evolution Service.", "====> * setuptargetrepos", " Produces list of repositories that should be available to be used during IPU process.", "", "============================================================", " ERRORS ", "============================================================", "", "2026-03-23 19:25:47.225099 [ERROR] Actor: scan_subscription_manager_info", "Message: A subscription-manager command failed to execute", "Summary:", " Details: Command ['subscription-manager', 'release'] failed with exit code 1.", " Stderr: This system is not yet registered. Try 'subscription-manager register --help' for more information.", " Hint: Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.", " Link: https://access.redhat.com/solutions/6138372", "", "============================================================", " END OF ERRORS ", "============================================================", "", "Debug output written to /var/log/leapp/leapp-preupgrade.log", "", "============================================================", " REPORT OVERVIEW ", "============================================================", "", "Following errors occurred and the upgrade cannot continue:", " 1. Actor: scan_subscription_manager_info", " Message: A subscription-manager command failed to execute", "", "HIGH and MEDIUM severity reports:", " 1. Packages available in excluded repositories will not be installed", "", "Reports summary:", " Errors: 1", " Inhibitors: 0", " HIGH severity reports: 1", " MEDIUM severity reports: 0", " LOW severity reports: 0", " INFO severity reports: 1", "", "Before continuing, review the full report below for details about discovered problems and possible remediation instructions:", " A report has been generated at /var/log/leapp/leapp-report.txt", " A report has been generated at /var/log/leapp/leapp-report.json", "", "============================================================", " END OF REPORT OVERVIEW ", "============================================================", "", "Answerfile has been generated at /var/log/leapp/answerfile"]}
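The preupgrade run above fails by design on this test node: the system is not registered with subscription-manager, so the scan_subscription_manager_info actor errors out, leapp exits non-zero, and the role records the result through failed_when rather than aborting the play. Per the hint in the report, either of the following should clear the error on a real host (a sketch only; the register credentials and repoid are placeholders):

  # Option 1: register the system with RHSM first
  subscription-manager register --username <rhn-user> --password <rhn-pass>
  # Option 2: skip RHSM and deliver the target repositories yourself
  leapp preupgrade --report-schema=1.2.0 --no-rhsm --enablerepo <target-repoid>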
TASK [infra.leapp.analysis : analysis-leapp | Include check-results-file.yml] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:57
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/check-results-file.yml for managed-node01

TASK [infra.leapp.analysis : check-results-file | Result file status] **********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/check-results-file.yml:2
ok: [managed-node01] => {"changed": false, "stat": {"atime": 1774308366.4400403, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "23d631d6a2379806a81c16b4b9019ce0bd9f5de8", "ctime": 1774308366.4400403, "dev": 51715, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 452984986, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774308366.4400403, "nlink": 1, "path": "/var/log/leapp/leapp-report.txt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2702, "uid": 0, "version": "1623617119", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false}}

TASK [infra.leapp.analysis : check-results-file | Check that result file exists] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/check-results-file.yml:7
ok: [managed-node01] => { "changed": false, "msg": "All assertions passed" }

TASK [analysis-leapp | Run parse_leapp_report to check for inhibitors] *********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:60

TASK [infra.leapp.common : parse_leapp_report | Ensure ansible_facts used by role] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:4
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml for managed-node01

TASK [infra.leapp.common : get_facts | Ensure ansible_facts used by role] ******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml:3
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : parse_leapp_report | Default upgrade_inhibited to false] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:15
ok: [managed-node01] => {"ansible_facts": {"upgrade_inhibited": false}, "changed": false}

TASK [infra.leapp.common : parse_leapp_report | Fail if pre-upgrade analysis was not run] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:20
ok: [managed-node01] => {"changed": false, "failed_when_result": false, "stat": {"atime": 1774308390.515167, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "23d631d6a2379806a81c16b4b9019ce0bd9f5de8", "ctime": 1774308366.4400403, "dev": 51715, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 452984986, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774308366.4400403, "nlink": 1, "path": "/var/log/leapp/leapp-report.txt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2702, "uid": 0, "version": "1623617119", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false}}

TASK [infra.leapp.common : parse_leapp_report | Collect human readable report results] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:26
ok: [managed-node01] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [infra.leapp.common : parse_leapp_report | Collect JSON report results] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:32
ok: [managed-node01] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [infra.leapp.common : parse_leapp_report | Parse report results] **********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:38
ok: [managed-node01] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [infra.leapp.common : parse_leapp_report | Clear leapp_inhibitors] ********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:45
ok: [managed-node01] => {"ansible_facts": {"leapp_inhibitors": []}, "changed": false}

TASK [infra.leapp.common : parse_leapp_report | Check for inhibitors] **********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:49
ok: [managed-node01] => (item={'audience': 'sysadmin', 'groups': ['error'], 'key': '7ec8269784db1bba2ac54ae438689ef397e16833', 'severity': 'high', 'summary': '{"details": "Command [\'subscription-manager\', \'release\'] failed with exit code 1.", "stderr": "This system is not yet registered. Try \'subscription-manager register --help\' for more information.\\n", "hint": "Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.", "link": "https://access.redhat.com/solutions/6138372"}', 'title': 'A subscription-manager command failed to execute', 'timeStamp': '2026-03-23T23:25:47.225850Z', 'hostname': 'managed-node01', 'actor': 'scan_subscription_manager_info', 'id': 'ad78a020d7cb7a98e2dfde173630c281505d2b62125f85c6bea612fcff7a31f1'}) => {"ansible_facts": {"leapp_inhibitors": [{"actor": "scan_subscription_manager_info", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "ad78a020d7cb7a98e2dfde173630c281505d2b62125f85c6bea612fcff7a31f1", "key": "7ec8269784db1bba2ac54ae438689ef397e16833", "severity": "high", "summary": "{\"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\", \"link\": \"https://access.redhat.com/solutions/6138372\"}", "timeStamp": "2026-03-23T23:25:47.225850Z", "title": "A subscription-manager command failed to execute"}], "upgrade_inhibited": true}, "ansible_loop_var": "item", "changed": false, "item": {"actor": "scan_subscription_manager_info", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "ad78a020d7cb7a98e2dfde173630c281505d2b62125f85c6bea612fcff7a31f1", "key": "7ec8269784db1bba2ac54ae438689ef397e16833", "severity": "high", "summary": "{\"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\", \"link\": \"https://access.redhat.com/solutions/6138372\"}", "timeStamp": "2026-03-23T23:25:47.225850Z", "title": "A subscription-manager command failed to execute"}}
skipping: [managed-node01] => (item={'audience': 'sysadmin', 'detail': {'remediations': [{'context': 'If some of excluded repositories are still required to be used during the upgrade, execute leapp with the --enablerepo option with the repoid of the repository required to be enabled as an argument (the option can be used multiple times).', 'type': 'hint'}]}, 'groups': ['repository', 'failure'], 'key': '1b9132cb2362ae7830e48eee7811be9527747de8', 'severity': 'info', 'summary': 'The following repositories are not supported by Red Hat and are excluded from the list of repositories used during the upgrade.\n- codeready-builder-for-rhel-9-aarch64-rpms\n- codeready-builder-beta-for-rhel-9-s390x-rpms\n- codeready-builder-for-rhel-9-s390x-eus-rpms\n- codeready-builder-for-rhel-9-x86_64-eus-rpms\n- rhui-codeready-builder-for-rhel-9-x86_64-rhui-rpms\n- codeready-builder-for-rhel-9-ppc64le-eus-rpms\n- codeready-builder-beta-for-rhel-9-x86_64-rpms\n- codeready-builder-for-rhel-9-aarch64-eus-rpms\n- codeready-builder-for-rhel-9-s390x-rpms\n- codeready-builder-for-rhel-9-x86_64-rpms\n- codeready-builder-beta-for-rhel-9-aarch64-rpms\n- codeready-builder-beta-for-rhel-9-ppc64le-rpms\n- codeready-builder-for-rhel-9-x86_64-rhui-rpms\n- rhui-codeready-builder-for-rhel-9-aarch64-rhui-rpms\n- crb\n- codeready-builder-for-rhel-9-ppc64le-rpms\n- codeready-builder-for-rhel-9-rhui-rpms', 'title': 'Excluded target system repositories', 'timeStamp': '2026-03-23T23:26:01.912030Z', 'hostname': 'managed-node01', 'actor': 'repositories_blacklist', 'id': '22901c5058630276fa327922e563ec2555325cabbebd86a4060a1f9b8d22a246'}) => {"ansible_loop_var": "item", "changed": false, "item": {"actor": "repositories_blacklist", "audience": "sysadmin", "detail": {"remediations": [{"context": "If some of excluded repositories are still required to be used during the upgrade, execute leapp with the --enablerepo option with the repoid of the repository required to be enabled as an argument (the option can be used multiple times).", "type": "hint"}]}, "groups": ["repository", "failure"], "hostname": "managed-node01", "id": "22901c5058630276fa327922e563ec2555325cabbebd86a4060a1f9b8d22a246", "key": "1b9132cb2362ae7830e48eee7811be9527747de8", "severity": "info", "summary": "The following repositories are not supported by Red Hat and are excluded from the list of repositories used during the upgrade.\n- codeready-builder-for-rhel-9-aarch64-rpms\n- codeready-builder-beta-for-rhel-9-s390x-rpms\n- codeready-builder-for-rhel-9-s390x-eus-rpms\n- codeready-builder-for-rhel-9-x86_64-eus-rpms\n- rhui-codeready-builder-for-rhel-9-x86_64-rhui-rpms\n- codeready-builder-for-rhel-9-ppc64le-eus-rpms\n- codeready-builder-beta-for-rhel-9-x86_64-rpms\n- codeready-builder-for-rhel-9-aarch64-eus-rpms\n- codeready-builder-for-rhel-9-s390x-rpms\n- codeready-builder-for-rhel-9-x86_64-rpms\n- codeready-builder-beta-for-rhel-9-aarch64-rpms\n- codeready-builder-beta-for-rhel-9-ppc64le-rpms\n- codeready-builder-for-rhel-9-x86_64-rhui-rpms\n- rhui-codeready-builder-for-rhel-9-aarch64-rhui-rpms\n- crb\n- codeready-builder-for-rhel-9-ppc64le-rpms\n- codeready-builder-for-rhel-9-rhui-rpms", "timeStamp": "2026-03-23T23:26:01.912030Z", "title": "Excluded target system repositories"}, "skip_reason": "Conditional result was False"}
skipping: [managed-node01] => (item={'audience': 'sysadmin', 'detail': {'related_resources': [{'scheme': 'package', 'title': 'jitterentropy-devel'}]}, 'groups': ['repository'], 'key': '2437e204808f987477c0e9be8e4c95b3a87a9f3e', 'severity': 'high', 'summary': '1 packages will be skipped because they are available only in target system repositories that are intentionally excluded from the list of repositories used during the upgrade. See the report message titled "Excluded target system repositories" for details.\nThe list of these packages:\n- jitterentropy-devel (repoid: codeready-builder-for-rhel-9-x86_64-rpms)', 'title': 'Packages available in excluded repositories will not be installed', 'timeStamp': '2026-03-23T23:26:06.289041Z', 'hostname': 'managed-node01', 'actor': 'pes_events_scanner', 'id': 'e21a3791bd26ef5e15af6e20005872f3d02be853f62c81966a752f78c3861bfe'}) => {"ansible_loop_var": "item", "changed": false, "item": {"actor": "pes_events_scanner", "audience": "sysadmin", "detail": {"related_resources": [{"scheme": "package", "title": "jitterentropy-devel"}]}, "groups": ["repository"], "hostname": "managed-node01", "id": "e21a3791bd26ef5e15af6e20005872f3d02be853f62c81966a752f78c3861bfe", "key": "2437e204808f987477c0e9be8e4c95b3a87a9f3e", "severity": "high", "summary": "1 packages will be skipped because they are available only in target system repositories that are intentionally excluded from the list of repositories used during the upgrade. See the report message titled \"Excluded target system repositories\" for details.\nThe list of these packages:\n- jitterentropy-devel (repoid: codeready-builder-for-rhel-9-x86_64-rpms)", "timeStamp": "2026-03-23T23:26:06.289041Z", "title": "Packages available in excluded repositories will not be installed"}, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : parse_leapp_report | Collect inhibitors] ************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:61
ok: [managed-node01] => {"changed": false, "cmd": ["awk", "/\\(inhibitor\\)/,/^-------/", "/var/log/leapp/leapp-report.txt"], "delta": "0:00:00.003537", "end": "2026-03-23 19:26:32.190739", "failed_when_result": false, "msg": "", "rc": 0, "start": "2026-03-23 19:26:32.187202", "stderr": "", "stderr_lines": [], "stdout": "", "stdout_lines": []}

TASK [infra.leapp.common : parse_leapp_report | Collect high errors] ***********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/parse_leapp_report.yml:70
ok: [managed-node01] => {"changed": false, "cmd": ["awk", "/high \\(error\\)/,/^-------/", "/var/log/leapp/leapp-report.txt"], "delta": "0:00:00.003247", "end": "2026-03-23 19:26:32.508601", "failed_when_result": false, "msg": "", "rc": 0, "start": "2026-03-23 19:26:32.505354", "stderr": "", "stderr_lines": [], "stdout": "Risk Factor: high (error)\nTitle: A subscription-manager command failed to execute\nSummary: {\"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\", \"link\": \"https://access.redhat.com/solutions/6138372\"}\nKey: 7ec8269784db1bba2ac54ae438689ef397e16833\n----------------------------------------", "stdout_lines": ["Risk Factor: high (error)", "Title: A subscription-manager command failed to execute", "Summary: {\"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\", \"link\": \"https://access.redhat.com/solutions/6138372\"}", "Key: 7ec8269784db1bba2ac54ae438689ef397e16833", "----------------------------------------"]}
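Both extraction tasks above shell out to awk range patterns: print from the first line matching a marker through the next separator row of dashes. The same one-liners can be run directly against the report on the node:

  # Inhibitor entries (none in this run)
  awk '/\(inhibitor\)/,/^-------/' /var/log/leapp/leapp-report.txt
  # High-severity error entries (the subscription-manager failure)
  awk '/high \(error\)/,/^-------/' /var/log/leapp/leapp-report.txt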
TASK [infra.leapp.analysis : analysis-leapp | Create remediation hostvars] *****
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:65
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/create_remediation_hostvars.yml for managed-node01

TASK [infra.leapp.analysis : create_remediation_hostvars | Clear remediation_todo] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/create_remediation_hostvars.yml:2
ok: [managed-node01] => {"ansible_facts": {"leapp_remediation_todo": []}, "changed": false}

TASK [infra.leapp.analysis : create_remediation_hostvars | Map inhibitors to remediation_todo] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/create_remediation_hostvars.yml:6
skipping: [managed-node01] => (item={'audience': 'sysadmin', 'groups': ['error'], 'key': '7ec8269784db1bba2ac54ae438689ef397e16833', 'severity': 'high', 'summary': '{"details": "Command [\'subscription-manager\', \'release\'] failed with exit code 1.", "stderr": "This system is not yet registered. Try \'subscription-manager register --help\' for more information.\\n", "hint": "Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.", "link": "https://access.redhat.com/solutions/6138372"}', 'title': 'A subscription-manager command failed to execute', 'timeStamp': '2026-03-23T23:25:47.225850Z', 'hostname': 'managed-node01', 'actor': 'scan_subscription_manager_info', 'id': 'ad78a020d7cb7a98e2dfde173630c281505d2b62125f85c6bea612fcff7a31f1'}) => {"ansible_loop_var": "inhibitor", "changed": false, "inhibitor": {"actor": "scan_subscription_manager_info", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "ad78a020d7cb7a98e2dfde173630c281505d2b62125f85c6bea612fcff7a31f1", "key": "7ec8269784db1bba2ac54ae438689ef397e16833", "severity": "high", "summary": "{\"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\", \"link\": \"https://access.redhat.com/solutions/6138372\"}", "timeStamp": "2026-03-23T23:25:47.225850Z", "title": "A subscription-manager command failed to execute"}, "skip_reason": "Conditional result was False"}
skipping: [managed-node01] => {"changed": false, "msg": "All items skipped"}

TASK [infra.leapp.analysis : create_remediation_hostvars | Ensure host_vars directory exists] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/create_remediation_hostvars.yml:33
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.analysis : create_remediation_hostvars | Check if host_vars file exists] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/create_remediation_hostvars.yml:39
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.analysis : create_remediation_hostvars | Read existing host_vars file] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/create_remediation_hostvars.yml:44
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.analysis : create_remediation_hostvars | Write host_vars file] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/create_remediation_hostvars.yml:50
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.analysis : create_remediation_hostvars | Create remediation playbook file] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/create_remediation_hostvars.yml:67
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [analysis-leapp | Include custom_local_repos for local_repos_post_analysis] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:70
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [analysis-leapp | Restore original Satellite activation key] **************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:80
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [analysis-leapp | Copy reports to the controller] *************************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/analysis-leapp.yml:91

TASK [infra.leapp.common : copy_reports_to_controller | Ensure ansible_facts used by role] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_reports_to_controller.yml:20
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml for managed-node01

TASK [infra.leapp.common : get_facts | Ensure ansible_facts used by role] ******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml:3
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : copy_reports_to_controller | Ensure reports directory on controller] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_reports_to_controller.yml:23
changed: [managed-node01 -> localhost] => {"changed": true, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/ansible_leapp_analysis_logs_2026-03-23_23-25-10", "secontext": "unconfined_u:object_r:admin_home_t:s0", "size": 6, "state": "directory", "uid": 0}

TASK [infra.leapp.common : copy_reports_to_controller | Fetch report files if they exist] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_reports_to_controller.yml:33
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml for managed-node01 => (item=/var/log/leapp/leapp-report.txt)
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml for managed-node01 => (item=/var/log/leapp/leapp-report.json)
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml for managed-node01 => (item=/var/log/leapp/leapp-preupgrade.log)

TASK [infra.leapp.common : fetch_file_if_exists | Check if file exists] ********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml:7
ok: [managed-node01] => {"changed": false, "stat": {"atime": 1774308390.515167, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "23d631d6a2379806a81c16b4b9019ce0bd9f5de8", "ctime": 1774308366.4400403, "dev": 51715, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 452984986, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774308366.4400403, "nlink": 1, "path": "/var/log/leapp/leapp-report.txt", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 2702, "uid": 0, "version": "1623617119", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false}}

TASK [infra.leapp.common : fetch_file_if_exists | Copy report file to the controller] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml:12
changed: [managed-node01] => {"changed": true, "checksum": "23d631d6a2379806a81c16b4b9019ce0bd9f5de8", "dest": "/root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/ansible_leapp_analysis_logs_2026-03-23_23-25-10/managed-node01/leapp-report.txt", "md5sum": "f238ff686cf063778d5b52839481c6a6", "remote_checksum": "23d631d6a2379806a81c16b4b9019ce0bd9f5de8", "remote_md5sum": null}

TASK [infra.leapp.common : fetch_file_if_exists | Check if file exists] ********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml:7
ok: [managed-node01] => {"changed": false, "stat": {"atime": 1774308391.7491734, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 8, "charset": "us-ascii", "checksum": "39f4bdf9b3aadb52100b777ad7b00299e3191dd5", "ctime": 1774308366.4410403, "dev": 51715, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 452984987, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774308366.4410403, "nlink": 1, "path": "/var/log/leapp/leapp-report.json", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 4007, "uid": 0, "version": "15801170", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false}}

TASK [infra.leapp.common : fetch_file_if_exists | Copy report file to the controller] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml:12
changed: [managed-node01] => {"changed": true, "checksum": "39f4bdf9b3aadb52100b777ad7b00299e3191dd5", "dest": "/root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/ansible_leapp_analysis_logs_2026-03-23_23-25-10/managed-node01/leapp-report.json", "md5sum": "687f8417554853fc98e9efca7d398c36", "remote_checksum": "39f4bdf9b3aadb52100b777ad7b00299e3191dd5", "remote_md5sum": null}

TASK [infra.leapp.common : fetch_file_if_exists | Check if file exists] ********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml:7
ok: [managed-node01] => {"changed": false, "stat": {"atime": 1774308330.6729012, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 3152, "charset": "us-ascii", "checksum": "bcf5e809aa35d1dccdd6b7c76140f1cf165f8d45", "ctime": 1774308366.4420404, "dev": 51715, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 452984983, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774308366.4420404, "nlink": 1, "path": "/var/log/leapp/leapp-preupgrade.log", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 1610738, "uid": 0, "version": "3271614050", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false}}

TASK [infra.leapp.common : fetch_file_if_exists | Copy report file to the controller] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/fetch_file_if_exists.yml:12
changed: [managed-node01] => {"changed": true, "checksum": "bcf5e809aa35d1dccdd6b7c76140f1cf165f8d45", "dest": "/root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/ansible_leapp_analysis_logs_2026-03-23_23-25-10/managed-node01/leapp-preupgrade.log", "md5sum": "3e1ef317c170659b8649ac9025ae0dda", "remote_checksum": "bcf5e809aa35d1dccdd6b7c76140f1cf165f8d45", "remote_md5sum": null}

TASK [copy_reports_to_controller | Copy log file to the controller] ************
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_reports_to_controller.yml:42

TASK [infra.leapp.common : copy_archive_leapp_log | Ensure ansible_facts used by role] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:11
included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml for managed-node01

TASK [infra.leapp.common : get_facts | Ensure ansible_facts used by role] ******
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml:3
skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"}

TASK [infra.leapp.common : copy_archive_leapp_log | Check for log file] ********
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:14
ok: [managed-node01] => {"changed": false, "stat": {"atime": 1774308316.3729103, "attr_flags": "", "attributes": [], "block_size": 4096, "blocks": 24, "charset": "us-ascii", "checksum": "8edba146f8ad7f23ab0710de1c27b07a3ad84671", "ctime": 1774308366.9700432, "dev": 51715, "device_type": 0, "executable": false, "exists": true, "gid": 0, "gr_name": "root", "inode": 629145736, "isblk": false, "ischr": false, "isdir": false, "isfifo": false, "isgid": false, "islnk": false, "isreg": true, "issock": false, "isuid": false, "mimetype": "text/plain", "mode": "0644", "mtime": 1774308366.9700432, "nlink": 1, "path": "/var/log/leapp/ansible_leapp_analysis.log", "pw_name": "root", "readable": true, "rgrp": true, "roth": true, "rusr": true, "size": 9392, "uid": 0, "version": "22414458", "wgrp": false, "woth": false, "writeable": true, "wusr": true, "xgrp": false, "xoth": false, "xusr": false}}

TASK [infra.leapp.common : copy_archive_leapp_log | Add end time to log file] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:22
changed: [managed-node01] => {"backup": "", "changed": true, "msg": "line added"}

TASK [infra.leapp.common : copy_archive_leapp_log | Slurp file /var/log/leapp/ansible_leapp_analysis.log] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:30
ok: [managed-node01] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}

TASK [infra.leapp.common : copy_archive_leapp_log | Decode file /var/log/leapp/ansible_leapp_analysis.log] ***
task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:36
ok: [managed-node01] => {"censored": "the output has been hidden due to the fact that 'no_log: true' was specified for this result", "changed": false}
result", "changed": false} TASK [infra.leapp.common : copy_archive_leapp_log | Ensure reports directory on controller] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:41 ok: [managed-node01 -> localhost] => {"changed": false, "gid": 0, "group": "root", "mode": "0755", "owner": "root", "path": "/root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/ansible_leapp_analysis_logs_2026-03-23_23-25-10", "secontext": "unconfined_u:object_r:admin_home_t:s0", "size": 28, "state": "directory", "uid": 0} TASK [infra.leapp.common : copy_archive_leapp_log | Copy ansible leapp log to the controller] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:51 changed: [managed-node01] => {"changed": true, "checksum": "975767e41570ec6c259e4cc1641ceec80c55c742", "dest": "/root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/ansible_leapp_analysis_logs_2026-03-23_23-25-10/managed-node01/ansible_leapp_analysis.log", "md5sum": "2cbb8209ec367721e9d06d5d7d104921", "remote_checksum": "975767e41570ec6c259e4cc1641ceec80c55c742", "remote_md5sum": null} TASK [infra.leapp.common : copy_archive_leapp_log | Copy log file to timestamped location] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:57 changed: [managed-node01] => {"changed": true, "checksum": "975767e41570ec6c259e4cc1641ceec80c55c742", "dest": "/var/log/leapp/ansible_leapp_analysis_2026-03-23_23-25-10.log", "gid": 0, "group": "root", "md5sum": "2cbb8209ec367721e9d06d5d7d104921", "mode": "0644", "owner": "root", "secontext": "system_u:object_r:var_log_t:s0", "size": 9426, "src": "/var/log/leapp/ansible_leapp_analysis.log", "state": "file", "uid": 0} TASK [infra.leapp.common : copy_archive_leapp_log | Remove original log file] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/copy_archive_leapp_log.yml:69 changed: [managed-node01] => {"changed": true, "path": "/var/log/leapp/ansible_leapp_analysis.log", "state": "absent"} TASK [infra.leapp.analysis : Set stats for leapp_inhibitors] ******************* task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:30 ok: [managed-node01] => {"ansible_stats": {"aggregate": true, "data": {"leapp_inhibitors": [{"actor": "scan_subscription_manager_info", "audience": "sysadmin", "groups": ["error"], "hostname": "managed-node01", "id": "ad78a020d7cb7a98e2dfde173630c281505d2b62125f85c6bea612fcff7a31f1", "key": "7ec8269784db1bba2ac54ae438689ef397e16833", "severity": "high", "summary": "{\"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. 
Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\", \"link\": \"https://access.redhat.com/solutions/6138372\"}", "timeStamp": "2026-03-23T23:25:47.225850Z", "title": "A subscription-manager command failed to execute"}]}, "per_host": false}, "changed": false} TASK [infra.leapp.analysis : Notify analysis report is done handler] *********** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tasks/main.yml:35 NOTIFIED HANDLER infra.leapp.analysis : Preupgrade analysis report is done for managed-node01 NOTIFIED HANDLER infra.leapp.analysis : Display inhibitors for managed-node01 NOTIFIED HANDLER infra.leapp.analysis : Display errors for managed-node01 changed: [managed-node01] => {"changed": true, "msg": "All assertions passed"} TASK [Test | Include cleanup logs] ********************************************* task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/tests/tests_default.yml:13 TASK [infra.leapp.common : cleanup_logs | Ensure ansible_facts used by role] *** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/cleanup_logs.yml:2 included: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml for managed-node01 TASK [infra.leapp.common : get_facts | Ensure ansible_facts used by role] ****** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/get_facts.yml:3 skipping: [managed-node01] => {"changed": false, "skip_reason": "Conditional result was False"} TASK [infra.leapp.common : cleanup_logs | Cleanup | Remove log files] ********** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/common/tasks/cleanup_logs.yml:5 changed: [managed-node01] => {"changed": true, "cmd": "set -euxo pipefail\nrm -f /var/log/leapp/*.log\nrm -f /var/log/leapp/*.json\nrm -f /var/log/leapp/*.txt\n", "delta": "0:00:00.005771", "end": "2026-03-23 19:26:38.335714", "msg": "", "rc": 0, "start": "2026-03-23 19:26:38.329943", "stderr": "+ rm -f /var/log/leapp/ansible_leapp_analysis_2026-03-23_23-25-10.log /var/log/leapp/leapp-preupgrade.log\n+ rm -f /var/log/leapp/leapp-report.json\n+ rm -f /var/log/leapp/leapp-report.txt", "stderr_lines": ["+ rm -f /var/log/leapp/ansible_leapp_analysis_2026-03-23_23-25-10.log /var/log/leapp/leapp-preupgrade.log", "+ rm -f /var/log/leapp/leapp-report.json", "+ rm -f /var/log/leapp/leapp-report.txt"], "stdout": "", "stdout_lines": []} RUNNING HANDLER [infra.leapp.analysis : Display inhibitors] ******************** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/handlers/main.yml:10 skipping: [managed-node01] => {} RUNNING HANDLER [infra.leapp.analysis : Display errors] ************************ task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/handlers/main.yml:18 ok: [managed-node01] => { "results_errors.stdout_lines": [ "Risk Factor: high (error)", "Title: A subscription-manager command failed to execute", "Summary: {\"details\": \"Command ['subscription-manager', 'release'] failed with exit code 1.\", \"stderr\": \"This system is not yet registered. Try 'subscription-manager register --help' for more information.\\n\", \"hint\": \"Please ensure you have a valid RHEL subscription and your network is up. 
If you are using proxy for Red Hat subscription-manager, please make sure it is specified inside the /etc/rhsm/rhsm.conf file. Or use the --no-rhsm option when running leapp, if you do not want to use subscription-manager for the in-place upgrade and you want to deliver all target repositories by yourself or using RHUI on public cloud.\", \"link\": \"https://access.redhat.com/solutions/6138372\"}", "Key: 7ec8269784db1bba2ac54ae438689ef397e16833", "----------------------------------------" ] } RUNNING HANDLER [infra.leapp.analysis : Preupgrade analysis report is done] **** task path: /root/.ansible/collections/ansible_collections/infra/leapp/roles/analysis/handlers/main.yml:26 ok: [managed-node01] => { "msg": "The preupgrade analysis report generation is now complete. WARNING: Inhibitors found. Review the tasks above or the result file at /var/log/leapp/leapp-report.txt." } PLAY RECAP ********************************************************************* managed-node01 : ok=58 changed=17 unreachable=0 failed=0 skipped=25 rescued=0 ignored=0
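The run above finishes with no failed tasks and a single high-severity error entry in the leapp report. To drive the same analysis outside the collection's test suite, a minimal playbook applying the role could look like the sketch below; the host group, privilege escalation, and the absence of role variables are assumptions, not taken from tests_default.yml or the role defaults.

---
# Minimal sketch: apply the infra.leapp preupgrade analysis role.
# "rhel_hosts" is a placeholder inventory group; check the role's README
# for the variables a real run needs (target release, repositories, etc.).
- name: Run preupgrade analysis
  hosts: rhel_hosts
  become: true
  tasks:
    - name: Run role analysis
      ansible.builtin.include_role:
        name: infra.leapp.analysis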
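The repeated "Check if file exists" / "Copy report file to the controller" pairs come from fetch_file_if_exists.yml and follow a stat-then-fetch pattern. A minimal sketch of that pattern, with placeholder variable names (the role's actual task file may differ in detail):

# Sketch: only fetch a file when it exists on the managed node.
# "report_file" and "controller_log_dir" are placeholder variables.
- name: Check if file exists
  ansible.builtin.stat:
    path: "{{ report_file }}"
  register: report_stat

- name: Copy report file to the controller
  ansible.builtin.fetch:
    src: "{{ report_file }}"
    dest: "{{ controller_log_dir }}/{{ inventory_hostname }}/{{ report_file | basename }}"
    flat: true
  when: report_stat.stat.exists

The flat: true form with an explicit per-host directory matches the destinations seen above (for example .../managed-node01/leapp-report.txt) better than fetch's default layout, which would reproduce the full remote path under the destination.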
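The "Set stats for leapp_inhibitors" task publishes report entries to the controller as aggregated job stats. A sketch of how such data could be extracted from /var/log/leapp/leapp-report.json and published follows; the top-level "entries" key is assumed from the standard leapp report format, and the task and variable names are illustrative rather than the role's own:

# Sketch: read leapp-report.json, keep inhibitor/error entries, publish them.
- name: Read leapp report
  ansible.builtin.slurp:
    src: /var/log/leapp/leapp-report.json
  register: leapp_report_raw

- name: Collect inhibitor and error entries
  ansible.builtin.set_fact:
    leapp_inhibitors: "{{ leapp_inhibitors | default([]) + [item] }}"
  loop: "{{ (leapp_report_raw.content | b64decode | from_json).entries | default([]) }}"
  when: "'inhibitor' in item.groups | default([]) or 'error' in item.groups | default([])"

- name: Set stats for leapp_inhibitors
  ansible.builtin.set_stats:
    data:
      leapp_inhibitors: "{{ leapp_inhibitors | default([]) }}"
    aggregate: true
    per_host: false

With aggregate: true and per_host: false, as in the run above, the inhibitor list lands in the global custom stats that a calling workflow can read.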
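The cleanup step shells out to rm -f with globs, which is reasonable for test teardown. An Ansible-native equivalent, purely as an illustrative alternative rather than the collection's approach:

# Sketch: remove generated leapp logs and reports without shelling out.
- name: Find generated leapp files
  ansible.builtin.find:
    paths: /var/log/leapp
    patterns:
      - "*.log"
      - "*.json"
      - "*.txt"
  register: leapp_artifacts

- name: Remove generated leapp files
  ansible.builtin.file:
    path: "{{ item.path }}"
    state: absent
  loop: "{{ leapp_artifacts.files }}"
  loop_control:
    label: "{{ item.path }}"

The shell version is shorter and its -x trace shows exactly what was deleted; the find/file version reports each removal as an individual task result instead.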
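Finally, the one error reported in this run ("A subscription-manager command failed to execute") is expected on an unregistered lab host. On a real system there are two ways forward, both named in the report's own hint: register the host, or run leapp with --no-rhsm and provide the target repositories yourself (for example via RHUI on public cloud). A hedged sketch of the registration option; the credential variables are placeholders and the community.general collection must be available:

# Sketch: register the host so subscription-manager commands succeed.
# rhsm_username / rhsm_password are placeholder variables, ideally vaulted.
- name: Register the host with subscription-manager
  community.general.redhat_subscription:
    state: present
    username: "{{ rhsm_username }}"
    password: "{{ rhsm_password }}"
  no_log: true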