diff --git a/.ansible-lint b/.ansible-lint
index ff93a8f..49b8fbe 100644
--- a/.ansible-lint
+++ b/.ansible-lint
@@ -1,16 +1,24 @@
 ---
-# Collection wide lint-file
-# DO NOT CHANGE
+## Collection wide ansible-lint configuration file.
+# Changes for ansible-lint v25.7.0+
+# - Always executed from collection root using collection configuration.
+# - .ansible-lint-ignore can be used to ignore files, not folders.
+## Execution examples:
+# ansible-lint
+# ansible-lint roles/sap_swpm
+# ansible-lint roles/sap_install_media_detect -c roles/sap_install_media_detect/.ansible-lint
+
 exclude_paths:
   - .ansible/
   - .cache/
   - .github/
-  # - docs/
   - changelogs/
   - playbooks/
   - tests/
+
 enable_list:
   - yaml
+
 skip_list:
   # We don't want to enforce new Ansible versions for Galaxy:
   - meta-runtime[unsupported-version]
@@ -22,3 +30,11 @@ skip_list:
   - schema
   # Allow templating inside name because it creates more detailed output:
   - name[template]
+
+  # - command-instead-of-module
+  # - command-instead-of-shell
+  # - line-length
+  # - risky-shell-pipe
+  # - no-changed-when
+  # - no-handler
+  # - ignore-errors
diff --git a/.github/workflows/.ansible-lint b/.github/workflows/.ansible-lint
deleted file mode 100644
index 69435ba..0000000
--- a/.github/workflows/.ansible-lint
+++ /dev/null
@@ -1,10 +0,0 @@
----
-
-skip_list:
-  - command-instead-of-module
-  - command-instead-of-shell
-  - line-length
-  - risky-shell-pipe
-  - no-changed-when
-  - no-handler
-  - ignore-errors
diff --git a/.github/workflows/ansible-lint-sap_software_download.yml b/.github/workflows/ansible-lint-sap_software_download.yml
new file mode 100644
index 0000000..76eee1e
--- /dev/null
+++ b/.github/workflows/ansible-lint-sap_software_download.yml
@@ -0,0 +1,36 @@
+---
+name: Ansible Lint - sap_software_download
+
+on:
+  push:
+    branches:
+      - main
+      - dev
+    paths:
+      - 'roles/sap_software_download/**'
+  pull_request:
+    branches:
+      - main
+      - dev
+    paths:
+      - 'roles/sap_software_download/**'
+
+  workflow_dispatch:
+
+jobs:
+  ansible-lint:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v5
+
+      # Use @v25 to automatically track the latest release from the year 2025.
+      # ansible-lint uses Calendar Versioning (e.g., v25.9.0 -> YYYY.MM.PATCH).
+      # Avoid using @main, which can introduce breaking changes unexpectedly.
+      - uses: ansible/ansible-lint@v25
+        with:
+          # v25.7.0 no longer uses 'working_directory' and the role path is set in 'args'.
+          # A role-specific .ansible-lint can be added with the '-c' argument.
+          args: roles/sap_software_download
+          # Use the shared requirements file from the collection root for dependency context.
+          requirements_file: ./requirements.yml
diff --git a/.github/workflows/ansible-lint.yml b/.github/workflows/ansible-lint.yml
index b55e812..ba2473f 100644
--- a/.github/workflows/ansible-lint.yml
+++ b/.github/workflows/ansible-lint.yml
@@ -1,14 +1,24 @@
-name: Ansible Lint
+---
+name: Ansible Lint - Collection
 
-on: [push, pull_request]
+on:
+  schedule:
+    # This is 03:05 UTC, which is 5:05 AM in Prague/CEST.
+    - cron: '5 3 * * 1'
+
+  workflow_dispatch:
 
 jobs:
   ansible-lint:
-    runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
 
-      - name: Ansible Lint Action
-        uses: ansible/ansible-lint@v6
+      # Use @v25 to automatically track the latest release from the year 2025.
+      # ansible-lint uses Calendar Versioning (e.g., v25.9.0 -> YYYY.MM.PATCH).
+      # Avoid using @main, which can introduce breaking changes unexpectedly.
+      - uses: ansible/ansible-lint@v25
+        with:
+          # Use the shared requirements file from the collection root for dependency context.
+          requirements_file: ./requirements.yml
diff --git a/.github/workflows/ansible-test-sanity.yml b/.github/workflows/ansible-test-sanity.yml
new file mode 100644
index 0000000..aa249b9
--- /dev/null
+++ b/.github/workflows/ansible-test-sanity.yml
@@ -0,0 +1,84 @@
+---
+# Always check the ansible-core support matrix before configuring the test matrix.
+# https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix
+
+name: Ansible Test - Sanity
+
+on:
+  schedule:
+    # This is 03:05 UTC, which is 5:05 AM in Prague/CEST
+    - cron: '5 3 * * 1'
+
+  pull_request:
+    branches:
+      - main
+      - dev
+
+  workflow_dispatch:
+
+jobs:
+  sanity-supported:
+    runs-on: ubuntu-latest
+    name: Sanity (Supported Ⓐ${{ matrix.ansible }})
+    strategy:
+      fail-fast: false # Disabled so we can see all failed combinations.
+      # Define a build matrix to test compatibility across multiple Ansible versions.
+      # Each version listed below will spawn a separate job that runs in parallel.
+      matrix:
+        ansible:
+          - 'stable-2.18' # Python 3.11 - 3.13
+          - 'stable-2.19' # Python 3.11 - 3.13
+          - 'devel' # Test against the upcoming development version.
+
+    steps:
+      - uses: actions/checkout@v5
+
+      - name: ansible-test - sanity
+        uses: ansible-community/ansible-test-gh-action@release/v1
+        with:
+          ansible-core-version: ${{ matrix.ansible }}
+          testing-type: sanity
+
+  sanity-eol:
+    runs-on: ubuntu-latest
+    # This job only runs if the supported tests pass.
+    needs: sanity-supported
+    name: Sanity (EOL Ⓐ${{ matrix.ansible }}+py${{ matrix.python }})
+    continue-on-error: true # This entire job is allowed to fail.
+    strategy:
+      fail-fast: false # Disabled so we can see all failed combinations.
+      # Define a build matrix to test compatibility across multiple Ansible versions.
+      # Each version listed below will spawn a separate job that runs in parallel.
+      matrix:
+        ansible:
+          - 'stable-2.14' # Python 3.9 - 3.11
+          - 'stable-2.15' # Python 3.9 - 3.11
+          - 'stable-2.16' # Python 3.10 - 3.12
+          - 'stable-2.17' # Python 3.10 - 3.12
+        python:
+          - '3.9'
+          - '3.10'
+          - '3.11'
+          - '3.12'
+        exclude:
+          # Exclusions for incompatible Python versions.
+          - ansible: 'stable-2.14'
+            python: '3.12'
+
+          - ansible: 'stable-2.15'
+            python: '3.12'
+
+          - ansible: 'stable-2.16'
+            python: '3.9'
+
+          - ansible: 'stable-2.17'
+            python: '3.9'
+    steps:
+      - uses: actions/checkout@v5
+
+      - name: ansible-test - sanity
+        uses: ansible-community/ansible-test-gh-action@release/v1
+        with:
+          ansible-core-version: ${{ matrix.ansible }}
+          target-python-version: ${{ matrix.python }}
+          testing-type: sanity
diff --git a/README.md b/README.md
index 52724e5..536d26c 100644
--- a/README.md
+++ b/README.md
@@ -120,7 +120,7 @@ When an SAP User ID (e.g. S-User) is enabled with and part of an SAP Universal I
 - the SAP User ID
 - the password for login with the SAP Universal ID
 
-In addition, if a SAP Universal ID is used then the recommendation is to check and reset the SAP User ID ‘Account Password’ in the [SAP Universal ID Account Manager](https://account.sap.com/manage/accounts), which will help to avoid any potential conflicts.
+In addition, if a SAP Universal ID is used then the recommendation is to check and reset the SAP User ID `Account Password` in the [SAP Universal ID Account Manager](https://account.sap.com/manage/accounts), which will help to avoid any potential conflicts.
For further information regarding connection errors, please see the FAQ section [Errors with prefix 'SAP SSO authentication failed - '](./docs/FAQ.md#errors-with-prefix-sap-sso-authentication-failed---). diff --git a/docs/FAQ.md b/docs/FAQ.md index ca093b9..bbe4c84 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -20,7 +20,7 @@ The error HTTP 401 refers to either: - Unauthorized, the SAP User ID being used belongs to an SAP Company Number (SCN) with one or more Installation Number/s which do not have license agreements for these files - Unauthorized, the SAP User ID being used does not have SAP Download authorizations - Unauthorized, the SAP User ID is part of an SAP Universal ID and must use the password of the SAP Universal ID - - In addition, if a SAP Universal ID is used then the recommendation is to check and reset the SAP User ID ‘Account Password’ in the [SAP Universal ID Account Manager](https://account.sap.com/manage/accounts), which will help to avoid any potential conflicts. + - In addition, if a SAP Universal ID is used then the recommendation is to check and reset the SAP User ID `Account Password` in the [SAP Universal ID Account Manager](https://account.sap.com/manage/accounts), which will help to avoid any potential conflicts. This is documented under [Execution - Credentials](https://github.com/sap-linuxlab/community.sap_launchpad#requirements-dependencies-and-testing). diff --git a/plugins/module_utils/auth.py b/plugins/module_utils/auth.py index 5c26609..cf78e47 100644 --- a/plugins/module_utils/auth.py +++ b/plugins/module_utils/auth.py @@ -1,16 +1,56 @@ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + import json import re +from functools import wraps from urllib.parse import parse_qs, quote_plus, urljoin -from bs4 import BeautifulSoup -from requests.models import HTTPError from . import constants as C from . import exceptions +try: + from bs4 import BeautifulSoup +except ImportError: + HAS_BS4 = False + BeautifulSoup = None +else: + HAS_BS4 = True + +try: + from requests.models import HTTPError +except ImportError: + HAS_REQUESTS = False + HTTPError = None +else: + HAS_REQUESTS = True + _GIGYA_SDK_BUILD_NUMBER = None +def require_bs4(func): + # A decorator to check for the 'beautifulsoup4' library before executing a function. + @wraps(func) + def wrapper(*args, **kwargs): + if not HAS_BS4: + raise ImportError("The 'beautifulsoup4' library is required but was not found.") + return func(*args, **kwargs) + return wrapper + + +def require_requests(func): + # A decorator to check for the 'requests' library before executing a function. + @wraps(func) + def wrapper(*args, **kwargs): + if not HAS_REQUESTS: + raise ImportError("The 'requests' library is required but was not found.") + return func(*args, **kwargs) + return wrapper + + +@require_requests +@require_bs4 def login(client, username, password): # Main authentication function. 
# @@ -57,18 +97,20 @@ def login(client, username, password): 'samlContext': params['samlContext'] } endpoint, meta = get_sso_endpoint_meta(client, idp_endpoint, - params=context, - allow_redirects=False) + params=context, + allow_redirects=False) while (endpoint != C.URL_LAUNCHPAD + '/'): endpoint, meta = get_sso_endpoint_meta(client, endpoint, - data=meta, - headers=C.GIGYA_HEADERS, - allow_redirects=False) + data=meta, + headers=C.GIGYA_HEADERS, + allow_redirects=False) client.post(endpoint, data=meta, headers=C.GIGYA_HEADERS) +@require_requests +@require_bs4 def get_sso_endpoint_meta(client, url, **kwargs): # Scrapes an HTML page to find the next SSO form action URL and its input fields. method = 'POST' if kwargs.get('data') or kwargs.get('json') else 'GET' @@ -87,8 +129,7 @@ def get_sso_endpoint_meta(client, url, **kwargs): form = soup.find('form') if not form: - raise ValueError( - f'Unable to find form: {res.url}\nContent:\n{res.text}') + raise ValueError('Unable to find form: {0}\nContent:\n{1}'.format(res.url, res.text)) inputs = form.find_all('input') endpoint = urljoin(res.url, form['action']) @@ -100,6 +141,7 @@ def get_sso_endpoint_meta(client, url, **kwargs): return (endpoint, metadata) +@require_requests def _get_gigya_login_params(client, url, data): # Follows a redirect and extracts parameters from the resulting URL's query string. gigya_idp_res = client.post(url, data=data) @@ -109,9 +151,10 @@ def _get_gigya_login_params(client, url, data): return params +@require_requests def _gigya_websdk_bootstrap(client, params): # Performs the initial bootstrap call to the Gigya WebSDK. - page_url = f'{C.URL_ACCOUNT_SAML_PROXY}?apiKey=' + params['apiKey'], + page_url = C.URL_ACCOUNT_SAML_PROXY + '?apiKey=' + params['apiKey'] params.update({ 'pageURL': page_url, 'sdk': 'js_latest', @@ -120,10 +163,11 @@ def _gigya_websdk_bootstrap(client, params): }) client.get(C.URL_ACCOUNT_CDC_API + '/accounts.webSdkBootstrap', - params=params, - headers=C.GIGYA_HEADERS) + params=params, + headers=C.GIGYA_HEADERS) +@require_requests def _gigya_login(client, username, password, api_key): # Performs a login using the standard Gigya accounts.login API. # This avoids a custom SAP endpoint that triggers password change notifications. @@ -135,7 +179,7 @@ def _gigya_login(client, username, password, api_key): 'include': 'login_token' } - login_url = f"{C.URL_ACCOUNT_CDC_API}/accounts.login" + login_url = "{0}/accounts.login".format(C.URL_ACCOUNT_CDC_API) res = client.post(login_url, data=login_payload) login_response = res.json() @@ -149,11 +193,12 @@ def _gigya_login(client, username, password, api_key): 'Please log in to https://account.sap.com manually to reset it.' ) error_message = login_response.get('errorDetails', 'Unknown authentication error') - raise exceptions.AuthenticationError(f"Gigya authentication failed: {error_message} (errorCode: {error_code})") + raise exceptions.AuthenticationError("Gigya authentication failed: {0} (errorCode: {1})".format(error_message, error_code)) return login_response.get('login_token') +@require_requests def _get_id_token(client, saml_params, login_token): # Exchanges a Gigya login token for a JWT ID token. query_params = { @@ -166,6 +211,7 @@ def _get_id_token(client, saml_params, login_token): return token +@require_requests def _get_uid(client, saml_params, login_token): # Retrieves the user's unique ID (UID) using the login token. 
query_params = { @@ -177,36 +223,40 @@ def _get_uid(client, saml_params, login_token): return uid +@require_requests def _get_uid_details(client, uid, id_token): # Fetches detailed account information for a given UID. - url = f'{C.URL_ACCOUNT_CORE_API}/accounts/{uid}' + url = '{0}/accounts/{1}'.format(C.URL_ACCOUNT_CORE_API, uid) headers = C.GIGYA_HEADERS.copy() - headers['Authorization'] = f'Bearer {id_token}' + headers['Authorization'] = 'Bearer {0}'.format(id_token) uid_details_response = client.get(url, headers=headers).json() return uid_details_response +@require_requests def _is_uid_linked_multiple_sids(uid_details): # Checks if a Universal ID (UID) is linked to more than one S-User ID. accounts = uid_details['accounts'] linked = [] - for _, v in accounts.items(): + for _account_type, v in accounts.items(): linked.extend(v['linkedAccounts']) return len(linked) > 1 +@require_requests def _select_account(client, uid, sid, id_token): # Selects a specific S-User ID when a Universal ID is linked to multiple accounts. - url = f'{C.URL_ACCOUNT_CORE_API}/accounts/{uid}/selectedAccount' + url = '{0}/accounts/{1}/selectedAccount'.format(C.URL_ACCOUNT_CORE_API, uid) data = {'idsName': sid, 'automatic': 'false'} headers = C.GIGYA_HEADERS.copy() - headers['Authorization'] = f'Bearer {id_token}' + headers['Authorization'] = 'Bearer {0}'.format(id_token) return client.request('PUT', url, headers=headers, json=data) +@require_requests def _get_sdk_build_number(client, api_key): # Fetches the gigya.js file to extract and cache the SDK build number. global _GIGYA_SDK_BUILD_NUMBER @@ -224,11 +274,12 @@ def _get_sdk_build_number(client, api_key): return build_number +@require_requests def _cdc_api_request(client, endpoint, saml_params, query_params): # Helper to make requests to the Gigya/CDC API, handling common parameters and errors. 
url = '/'.join((C.URL_ACCOUNT_CDC_API, endpoint)) - query = '&'.join([f'{k}={v}' for k, v in saml_params.items()]) + query = '&'.join(['{0}={1}'.format(k, v) for k, v in saml_params.items()]) page_url = quote_plus('?'.join((C.URL_ACCOUNT_SAML_PROXY, query))) api_key = saml_params['apiKey'] @@ -251,7 +302,7 @@ def _cdc_api_request(client, endpoint, saml_params, query_params): error_code = json_response['errorCode'] if error_code != 0: - http_error_msg = '{} Error: {} for url: {}'.format( + http_error_msg = '{0} Error: {1} for url: {2}'.format( json_response['statusCode'], json_response['errorMessage'], res.url) raise HTTPError(http_error_msg, response=res) return json_response diff --git a/plugins/module_utils/client.py b/plugins/module_utils/client.py index 9bd19e1..ed37d7a 100644 --- a/plugins/module_utils/client.py +++ b/plugins/module_utils/client.py @@ -1,14 +1,36 @@ -import requests +from __future__ import absolute_import, division, print_function +__metaclass__ = type + import re -import urllib3 from urllib.parse import urlparse -from requests.adapters import HTTPAdapter from .constants import COMMON_HEADERS - -class _SessionAllowBasicAuthRedirects(requests.Session): +try: + import requests + from requests.adapters import HTTPAdapter + _RequestsSession = requests.Session +except ImportError: + HAS_REQUESTS = False + # Placeholders to prevent errors on module load + requests = None + HTTPAdapter = object + _RequestsSession = object +else: + HAS_REQUESTS = True + +try: + import urllib3 +except ImportError: + HAS_URLLIB3 = False + # Placeholder to prevent errors on module load + urllib3 = None +else: + HAS_URLLIB3 = True + + +class _SessionAllowBasicAuthRedirects(_RequestsSession): # By default, the `Authorization` header for Basic Auth will be removed # if the redirect is to a different host. # In our case, the DirectDownloadLink with `softwaredownloads.sap.com` domain @@ -17,16 +39,21 @@ class _SessionAllowBasicAuthRedirects(requests.Session): # for sap.com domains. # This is only required for legacy API. def rebuild_auth(self, prepared_request, response): - if 'Authorization' in prepared_request.headers: + # The parent class might not be a real requests.Session if requests is not installed. + if HAS_REQUESTS and 'Authorization' in prepared_request.headers: request_hostname = urlparse(prepared_request.url).hostname if not re.match(r'.*sap.com$', request_hostname): del prepared_request.headers['Authorization'] + def _is_updated_urllib3(): # `method_whitelist` argument for Retry is deprecated since 1.26.0, # and will be removed in v2.0.0. # Typically, the default version on RedHat 8.2 is 1.24.2, # so we need to check the version of urllib3 to see if it's updated. + if not HAS_URLLIB3: + return False + urllib3_version = urllib3.__version__.split('.') if len(urllib3_version) == 2: urllib3_version.append('0') @@ -43,6 +70,11 @@ class ApiClient: # object-oriented interface for making API requests, replacing the # previous global session and request functions. def __init__(self): + if not HAS_REQUESTS: + raise ImportError("The 'requests' library is required but was not found.") + if not HAS_URLLIB3: + raise ImportError("The 'urllib3' library is required but was not found.") + self.session = _SessionAllowBasicAuthRedirects() # Configure retry logic for the session. 
@@ -114,4 +146,4 @@ def head(self, url, **kwargs): return self.request('HEAD', url, **kwargs) def get_cookies(self): - return self.session.cookies \ No newline at end of file + return self.session.cookies diff --git a/plugins/module_utils/exceptions.py b/plugins/module_utils/exceptions.py index a50054b..5407b44 100644 --- a/plugins/module_utils/exceptions.py +++ b/plugins/module_utils/exceptions.py @@ -1,3 +1,6 @@ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + # Custom exceptions for the sap_launchpad collection. @@ -24,4 +27,3 @@ class DownloadError(SapLaunchpadError): class FileNotFoundError(SapLaunchpadError): # Raised when a searched file cannot be found. pass - diff --git a/plugins/module_utils/maintenance_planner/__init__.py b/plugins/module_utils/maintenance_planner/__init__.py index 9b5afe6..e69de29 100644 --- a/plugins/module_utils/maintenance_planner/__init__.py +++ b/plugins/module_utils/maintenance_planner/__init__.py @@ -1 +0,0 @@ -# This file makes the `maintenance_planner` directory into a Python package. \ No newline at end of file diff --git a/plugins/module_utils/maintenance_planner/api.py b/plugins/module_utils/maintenance_planner/api.py index 29e871e..f3e1426 100644 --- a/plugins/module_utils/maintenance_planner/api.py +++ b/plugins/module_utils/maintenance_planner/api.py @@ -1,20 +1,76 @@ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + import re import time from html import unescape +from functools import wraps from urllib.parse import urljoin -from bs4 import BeautifulSoup -from lxml import etree from .. import constants as C from .. import exceptions from ..auth import get_sso_endpoint_meta +try: + from bs4 import BeautifulSoup +except ImportError: + HAS_BS4 = False + BeautifulSoup = None +else: + HAS_BS4 = True + +try: + from lxml import etree +except ImportError: + HAS_LXML = False + etree = None +else: + HAS_LXML = True + +try: + from requests.exceptions import HTTPError +except ImportError: + HAS_REQUESTS = False + HTTPError = None +else: + HAS_REQUESTS = True + # Module-level cache _MP_XSRF_TOKEN = None _MP_TRANSACTIONS = None _MP_NAMESPACE = 'http://xml.sap.com/2012/01/mnp' +def require_bs4(func): + # A decorator to check for the 'beautifulsoup4' library before executing a function. + @wraps(func) + def wrapper(*args, **kwargs): + if not HAS_BS4: + raise ImportError("The 'beautifulsoup4' library is required but was not found.") + return func(*args, **kwargs) + return wrapper + + +def require_lxml(func): + # A decorator to check for the 'lxml' library before executing a function. + @wraps(func) + def wrapper(*args, **kwargs): + if not HAS_LXML: + raise ImportError("The 'lxml' library is required but was not found.") + return func(*args, **kwargs) + return wrapper + + +def require_requests(func): + # A decorator to check for the 'requests' library before executing a function. + @wraps(func) + def wrapper(*args, **kwargs): + if not HAS_REQUESTS: + raise ImportError("The 'requests' library is required but was not found.") + return func(*args, **kwargs) + return wrapper + + def auth_userapps(client): # Authenticates against userapps.support.sap.com to establish a session. _clear_mp_cookies(client, 'userapps') @@ -32,6 +88,7 @@ def auth_userapps(client): client.post(endpoint, data=meta) +@require_bs4 def get_transactions(client): # Retrieves a list of all available Maintenance Planner transactions. 
global _MP_TRANSACTIONS @@ -64,10 +121,12 @@ def get_transaction_id(client, name): if t.get('trans_display_id') == name: return t['trans_id'] - raise exceptions.FileNotFoundError(f"Transaction '{name}' not found by name or display ID.") + raise exceptions.FileNotFoundError("Transaction '{0}' not found by name or display ID.".format(name)) -def get_transaction_filename_url(client, trans_id): +@require_lxml +@require_requests +def get_transaction_filename_url(client, trans_id, validate_url=False): # Parses the files XML to get a list of (URL, Filename) tuples. xml = _get_download_files_xml(client, trans_id) e = etree.fromstring(xml.encode('utf-16')) @@ -76,13 +135,22 @@ def get_transaction_filename_url(client, trans_id): namespaces={'mnp': _MP_NAMESPACE} ) if not stack_files: - raise exceptions.FileNotFoundError(f"No stack files found in transaction ID {trans_id}.") + raise exceptions.FileNotFoundError("No stack files found in transaction ID {0}.".format(trans_id)) files = [] for f in stack_files: file_id = urljoin(C.URL_SOFTWARE_DOWNLOAD, '/file/' + f.get('id')) file_name = f.get('label') files.append((file_id, file_name)) + + if validate_url: + for pair in files: + url = pair[0] + try: + client.head(url) + except HTTPError: + raise exceptions.DownloadError('Download link is not available: {0}'.format(url)) + return files @@ -172,12 +240,13 @@ def _get_transaction(client, key, value): for t in transactions: if t.get(key) == value: return t - raise exceptions.FileNotFoundError(f"Transaction with {key}='{value}' not found.") + raise exceptions.FileNotFoundError("Transaction with {0}='{1}' not found.".format(key, value)) +@require_lxml def _build_mnp_xml(**params): # Constructs the MNP XML payload for API requests. - mnp = f'{{{_MP_NAMESPACE}}}' + mnp = '{{{0}}}'.format(_MP_NAMESPACE) request_keys = ['action', 'trans_name', 'sub_action', 'navigation'] request_attrs = {k: params.get(k, '') for k in request_keys} @@ -185,8 +254,8 @@ def _build_mnp_xml(**params): entity_keys = ['call_for', 'sessionid'] entity_attrs = {k: params.get(k, '') for k in entity_keys} - request = etree.Element(f'{mnp}request', nsmap={"mnp": _MP_NAMESPACE}, attrib=request_attrs) - entity = etree.SubElement(request, f'{mnp}entity', attrib=entity_attrs) + request = etree.Element('{0}request'.format(mnp), nsmap={"mnp": _MP_NAMESPACE}, attrib=request_attrs) + entity = etree.SubElement(request, '{0}entity'.format(mnp), attrib=entity_attrs) entity.text = '' if 'entities' in params and isinstance(params['entities'], etree._Element): @@ -199,4 +268,4 @@ def _clear_mp_cookies(client, startswith): # Clears cookies for a specific domain prefix from the client session. for cookie in client.session.cookies: if cookie.domain.startswith(startswith): - client.session.cookies.clear(domain=cookie.domain) \ No newline at end of file + client.session.cookies.clear(domain=cookie.domain) diff --git a/plugins/module_utils/maintenance_planner/main.py b/plugins/module_utils/maintenance_planner/main.py index 198de8b..83e7d5d 100644 --- a/plugins/module_utils/maintenance_planner/main.py +++ b/plugins/module_utils/maintenance_planner/main.py @@ -1,9 +1,11 @@ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + import pathlib from .. import auth, exceptions from ..client import ApiClient from . 
import api -from requests.exceptions import HTTPError def run_files(params): @@ -14,37 +16,41 @@ def run_files(params): msg='' ) - client = ApiClient() - username = params['suser_id'] - password = params['suser_password'] - transaction_name = params['transaction_name'] - validate_url = params['validate_url'] - try: + client = ApiClient() + username = params['suser_id'] + password = params['suser_password'] + transaction_name = params['transaction_name'] + validate_url = params['validate_url'] + auth.login(client, username, password) api.auth_userapps(client) transaction_id = api.get_transaction_id(client, transaction_name) - download_basket_details = api.get_transaction_filename_url(client, transaction_id) - - if validate_url: - for pair in download_basket_details: - url = pair[0] - try: - client.head(url) - except HTTPError: - raise exceptions.DownloadError(f'Download link is not available: {url}') + download_basket_details = api.get_transaction_filename_url(client, transaction_id, validate_url) result['download_basket'] = [{'DirectLink': i[0], 'Filename': i[1]} for i in download_basket_details] result['changed'] = True result['msg'] = "Successfully retrieved file list from SAP Maintenance Planner." - except (exceptions.SapLaunchpadError, HTTPError) as e: + except ImportError as e: + result['failed'] = True + if 'requests' in str(e): + result['missing_dependency'] = 'requests' + elif 'urllib3' in str(e): + result['missing_dependency'] = 'urllib3' + elif 'beautifulsoup4' in str(e): + result['missing_dependency'] = 'beautifulsoup4' + elif 'lxml' in str(e): + result['missing_dependency'] = 'lxml' + else: + result['msg'] = "An unexpected import error occurred: {0}".format(e) + except exceptions.SapLaunchpadError as e: result['failed'] = True result['msg'] = str(e) except Exception as e: result['failed'] = True - result['msg'] = f"An unexpected error occurred: {e}" + result['msg'] = 'An unexpected error occurred: {0}'.format(e) return result @@ -56,13 +62,13 @@ def run_stack_xml_download(params): msg='' ) - client = ApiClient() - username = params['suser_id'] - password = params['suser_password'] - transaction_name = params['transaction_name'] - dest = params['dest'] - try: + client = ApiClient() + username = params['suser_id'] + password = params['suser_password'] + transaction_name = params['transaction_name'] + dest = params['dest'] + auth.login(client, username, password) api.auth_userapps(client) @@ -70,12 +76,12 @@ def run_stack_xml_download(params): xml_content, filename = api.get_transaction_stack_xml_content(client, transaction_id) if not filename: - filename = f"{transaction_name}_stack.xml" + filename = "{0}_stack.xml".format(transaction_name) dest_path = pathlib.Path(dest) if not dest_path.is_dir(): result['failed'] = True - result['msg'] = f"Destination directory does not exist: {dest}" + result['msg'] = "Destination directory does not exist: {0}".format(dest) return result output_file = dest_path / filename @@ -85,17 +91,27 @@ def run_stack_xml_download(params): f.write(xml_content) except IOError as e: result['failed'] = True - result['msg'] = f"Failed to write to destination file {output_file}: {e}" + result['msg'] = "Failed to write to destination file {0}: {1}".format(output_file, e) return result result['changed'] = True - result['msg'] = f"SAP Maintenance Planner Stack XML successfully downloaded to {output_file}" + result['msg'] = "SAP Maintenance Planner Stack XML successfully downloaded to {0}".format(output_file) - except (exceptions.SapLaunchpadError, HTTPError) as e: 
+ except ImportError as e: + result['failed'] = True + if 'requests' in str(e): + result['missing_dependency'] = 'requests' + elif 'urllib3' in str(e): + result['missing_dependency'] = 'urllib3' + elif 'beautifulsoup4' in str(e) or 'lxml' in str(e): + result['missing_dependency'] = 'beautifulsoup4 and/or lxml' + else: + result['msg'] = "An unexpected import error occurred: {0}".format(e) + except exceptions.SapLaunchpadError as e: result['failed'] = True result['msg'] = str(e) except Exception as e: result['failed'] = True - result['msg'] = f"An unexpected error occurred: {e}" + result['msg'] = 'An unexpected error occurred: {0}'.format(e) - return result \ No newline at end of file + return result diff --git a/plugins/module_utils/software_center/__init__.py b/plugins/module_utils/software_center/__init__.py index 6a9cf69..e69de29 100644 --- a/plugins/module_utils/software_center/__init__.py +++ b/plugins/module_utils/software_center/__init__.py @@ -1 +0,0 @@ -# This file makes the `software_center` directory into a Python package. \ No newline at end of file diff --git a/plugins/module_utils/software_center/download.py b/plugins/module_utils/software_center/download.py index 8962bcd..416624f 100644 --- a/plugins/module_utils/software_center/download.py +++ b/plugins/module_utils/software_center/download.py @@ -1,18 +1,39 @@ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + import glob import hashlib import os import time - -from requests.exceptions import ConnectionError, HTTPError +from functools import wraps from .. import auth from .. import constants as C from .. import exceptions from . import search +try: + from requests.exceptions import ConnectionError, HTTPError +except ImportError: + HAS_REQUESTS = False + ConnectionError, HTTPError = None, None +else: + HAS_REQUESTS = True + _HAS_DOWNLOAD_AUTHORIZATION = None +def require_requests(func): + # A decorator to check for the 'requests' library before executing a function. + @wraps(func) + def wrapper(*args, **kwargs): + if not HAS_REQUESTS: + raise ImportError("The 'requests' library is required but was not found.") + return func(*args, **kwargs) + return wrapper + + +@require_requests def validate_local_file_checksum(client, local_filepath, query=None, download_link=None, deduplicate=None, search_alternatives=False): # Validates a local file against the remote checksum from the server. # Returns a dictionary with the validation status and additional context. @@ -42,7 +63,8 @@ def validate_local_file_checksum(client, local_filepath, query=None, download_li remote_etag = headers.get('ETag') if not remote_etag: - result['message'] = f"Checksum validation skipped: ETag header not found for URL '{download_link_final}'. Headers received: {headers}" + result['message'] = ("Checksum validation skipped: ETag header not found for URL '{0}'. Headers received: {1}" + .format(download_link_final, headers)) return result if _is_checksum_matched(local_filepath, remote_etag): @@ -53,7 +75,7 @@ def validate_local_file_checksum(client, local_filepath, query=None, download_li result['message'] = 'Local file checksum is invalid.' 
except exceptions.SapLaunchpadError as e: - result['message'] = f'Checksum validation skipped: {e}' + result['message'] = 'Checksum validation skipped: {0}'.format(e) return result @@ -74,6 +96,7 @@ def check_similar_files(dest, filename): return False, [] +@require_requests def _check_download_authorization(client): # Verifies that the authenticated user has the "Software Download" authorization. # Caches the result to avoid repeated API calls. @@ -102,6 +125,7 @@ def _check_download_authorization(client): ) +@require_requests def is_download_link_available(client, url, retry=0): # Verifies if a download link is active and returns the final, resolved URL. # Returns None if the link is not available. @@ -119,6 +143,7 @@ def is_download_link_available(client, url, retry=0): return None +@require_requests def _resolve_download_link(client, url, retry=0): # Resolves a tokengen URL to the final, direct download URL. # This encapsulates the SAML token exchange logic and includes retries. @@ -141,7 +166,7 @@ def _resolve_download_link(client, url, retry=0): client.session.cookies.clear(domain='.softwaredownloads.sap.com') # Retry on 403 (Forbidden) as it can be a temporary token issue. if (isinstance(e, HTTPError) and e.response.status_code != 403) or retry >= C.MAX_RETRY_TIMES: - raise exceptions.DownloadError(f"Could not resolve download URL after {C.MAX_RETRY_TIMES} retries: {e}") + raise exceptions.DownloadError("Could not resolve download URL after {0} retries: {1}".format(C.MAX_RETRY_TIMES, e)) time.sleep(60 * (retry + 1)) return _resolve_download_link(client, url, retry + 1) @@ -150,6 +175,7 @@ def _resolve_download_link(client, url, retry=0): return endpoint +@require_requests def stream_file_to_disk(client, url, filepath, retry=0, **kwargs): # Streams a large file to disk and verifies its checksum. kwargs.update({'stream': True}) @@ -162,7 +188,7 @@ def stream_file_to_disk(client, url, filepath, retry=0, **kwargs): if os.path.exists(filepath): os.remove(filepath) if retry >= C.MAX_RETRY_TIMES: - raise exceptions.DownloadError(f"Connection failed after {C.MAX_RETRY_TIMES} retries: {e}") + raise exceptions.DownloadError("Connection failed after {0} retries: {1}".format(C.MAX_RETRY_TIMES, e)) time.sleep(60 * (retry + 1)) return stream_file_to_disk(client, url, filepath, retry + 1, **kwargs) @@ -177,7 +203,7 @@ def stream_file_to_disk(client, url, filepath, retry=0, **kwargs): os.remove(filepath) if retry >= C.MAX_RETRY_TIMES: - raise exceptions.DownloadError(f'Failed to download {url}: checksum mismatch after {C.MAX_RETRY_TIMES} retries') + raise exceptions.DownloadError('Failed to download {0}: checksum mismatch after {1} retries'.format(url, C.MAX_RETRY_TIMES)) return stream_file_to_disk(client, url, filepath, retry + 1, **kwargs) diff --git a/plugins/module_utils/software_center/main.py b/plugins/module_utils/software_center/main.py index 539b40b..1ecf1da 100644 --- a/plugins/module_utils/software_center/main.py +++ b/plugins/module_utils/software_center/main.py @@ -1,3 +1,6 @@ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + import os from .. 
import auth @@ -55,17 +58,17 @@ def run_software_download(params): if not validate_checksum: if os.path.exists(filepath): result['skipped'] = True - result['msg'] = f"File already exists: {filename}" + result['msg'] = "File already exists: {0}".format(filename) return result filename_similar_exists, filename_similar_names = download.check_similar_files(dest, filename) if filename_similar_exists: result['skipped'] = True - result['msg'] = f"Similar file(s) already exist: {', '.join(filename_similar_names)}" + result['msg'] = "Similar file(s) already exist: {0}".format(', '.join(filename_similar_names)) return result - client = ApiClient() try: + client = ApiClient() auth.login(client, username, password) validation_result = None @@ -91,7 +94,7 @@ def run_software_download(params): if is_valid is True: result['skipped'] = True - result['msg'] = f"File already exists and checksum is valid: {filename}" + result['msg'] = "File already exists and checksum is valid: {0}".format(filename) return result elif is_valid is False: # The existing file is invalid, remove it to allow for re-download. @@ -99,7 +102,7 @@ def run_software_download(params): os.remove(filepath) else: # Validation could not be performed result['skipped'] = True - result['msg'] = f"File already exists: {filename}. {validation_result['message']}" + result['msg'] = "File already exists: {0}. {1}".format(filename, validation_result['message']) return result alternative_found = False @@ -119,18 +122,18 @@ def run_software_download(params): validation_result = download.validate_local_file_checksum(client, alt_filepath, download_link=download_link) if validation_result['validated'] is True: result['skipped'] = True - result['msg'] = f"Alternative file {download_filename} already exists and checksum is valid." + result['msg'] = "Alternative file {0} already exists and checksum is valid.".format(download_filename) return result elif validation_result['validated'] is False: # The existing alternative file is invalid, remove it to allow for re-download. os.remove(alt_filepath) else: # Validation could not be performed result['skipped'] = True - result['msg'] = f"Alternative file {download_filename} already exists. {validation_result['message']}" + result['msg'] = "Alternative file {0} already exists. {1}".format(download_filename, validation_result['message']) return result else: result['skipped'] = True - result['msg'] = f"File with correct/alternative name already exists: {download_filename}" + result['msg'] = "File with correct/alternative name already exists: {0}".format(download_filename) return result final_url = download.is_download_link_available(client, download_link) @@ -138,7 +141,7 @@ def run_software_download(params): if dry_run: msg = f"SAP Software is available to download: {download_filename}" if alternative_found: - msg = f"Alternative SAP Software is available to download: {download_filename} - original file {query} is not available" + msg = "Alternative SAP Software is available to download: {0} - original file {1} is not available".format(download_filename, query) result['msg'] = msg else: # The link is already resolved, just download it. @@ -147,22 +150,35 @@ def run_software_download(params): result['changed'] = True if validation_result and validation_result.get('validated') is False: - result['msg'] = f"Successfully re-downloaded {download_filename} due to an invalid checksum." 
+ result['msg'] = "Successfully re-downloaded {0} due to an invalid checksum.".format(download_filename) elif alternative_found: - result['msg'] = f"Successfully downloaded alternative SAP software: {download_filename} - original file {query} is not available to download" + result['msg'] = ("Successfully downloaded alternative SAP software: {0} " + "- original file {1} is not available to download".format(download_filename, query)) else: - result['msg'] = f"Successfully downloaded SAP software: {download_filename}" + result['msg'] = "Successfully downloaded SAP software: {0}".format(download_filename) else: result['failed'] = True - result['msg'] = f"Download link for {download_filename} is not available." + result['msg'] = "Download link for {0} is not available.".format(download_filename) + except ImportError as e: + result['failed'] = True + if 'requests' in str(e): + result['missing_dependency'] = 'requests' + elif 'urllib3' in str(e): + result['missing_dependency'] = 'urllib3' + elif 'beautifulsoup4' in str(e): + result['missing_dependency'] = 'beautifulsoup4' + elif 'lxml' in str(e): + result['missing_dependency'] = 'lxml' + else: + result['msg'] = "An unexpected import error occurred: {0}".format(e) except exceptions.SapLaunchpadError as e: result['failed'] = True result['msg'] = str(e) except Exception as e: result['failed'] = True - result['msg'] = f"An unexpected error occurred: {type(e).__name__} - {e}" + result['msg'] = "An unexpected error occurred: {0} - {1}".format(type(e).__name__, e) finally: download.clear_download_key_cookie(client) - return result \ No newline at end of file + return result diff --git a/plugins/module_utils/software_center/search.py b/plugins/module_utils/software_center/search.py index 535e8ac..8d2e3cf 100644 --- a/plugins/module_utils/software_center/search.py +++ b/plugins/module_utils/software_center/search.py @@ -1,4 +1,6 @@ -import csv +from __future__ import absolute_import, division, print_function +__metaclass__ = type + import json import os import re @@ -21,12 +23,12 @@ def find_file(client, name, deduplicate, search_alternatives): if files_count == 0: # If no exact match is found, and alternatives are requested, perform a fuzzy search. if not search_alternatives: - raise FileNotFoundError(f'File "{name}" is not available. To find a replacement, enable "search_alternatives".') + raise FileNotFoundError('File "{0}" is not available. To find a replacement, enable "search_alternatives".'.format(name)) software_fuzzy_found = _search_software_fuzzy(client, name) software_fuzzy_filtered, suggested_filename = _filter_fuzzy_search(software_fuzzy_found, name) if len(software_fuzzy_filtered) == 0: - raise FileNotFoundError(f'File "{name}" is not available and no alternatives could be found.') + raise FileNotFoundError('File "{0}" is not available and no alternatives could be found.'.format(name)) software_fuzzy_alternatives = software_fuzzy_filtered[0].get('Title') @@ -41,10 +43,10 @@ def find_file(client, name, deduplicate, search_alternatives): alternatives_count = len(software_search_alternatives_filtered) if alternatives_count == 0: - raise FileNotFoundError(f'File "{name}" is not available and no alternatives could be found.') + raise FileNotFoundError('File "{0}" is not available and no alternatives could be found.'.format(name)) elif alternatives_count > 1 and deduplicate == '': names = [s['Title'] for s in software_search_alternatives_filtered] - raise FileNotFoundError(f'More than one alternative was found: {", ".join(names)}. 
Please use a more specific filename.') + raise FileNotFoundError('More than one alternative was found: {0}. Please use a more specific filename.'.format(", ".join(names))) elif alternatives_count > 1 and deduplicate == 'first': software_found = software_search_alternatives_filtered[0] alternative_found = True @@ -59,7 +61,7 @@ def find_file(client, name, deduplicate, search_alternatives): elif files_count > 1 and deduplicate == '': # Handle cases where the direct search returns multiple exact matches. names = [s['Title'] for s in software_filtered] - raise FileNotFoundError(f'More than one result was found: {", ".join(names)}. Please use the correct full filename.') + raise FileNotFoundError('More than one result was found: {0}. Please use the correct full filename.'.format(", ".join(names))) elif files_count > 1 and deduplicate == 'first': software_found = software_filtered[0] elif files_count > 1 and deduplicate == 'last': @@ -168,7 +170,7 @@ def _prepare_search_filename_specific(filename): if filename_base.startswith(swpm_version): return swpm_version - # Example: SUM11SP04_2-80006858.SAR returns SUM11SP04 + # Example: SUM11SP04_2-80006858.SAR returns SUM11SP04 if filename_base.startswith('SUM'): return filename.split('-')[0].split('_')[0] @@ -188,17 +190,17 @@ def _prepare_search_filename_specific(filename): # Example: IMDB_LCAPPS_122P_3300-20010426.SAR returns IMDB_LCAPPS_122 elif filename_base.startswith('IMDB_LCAPPS_1'): filename_parts = filename.split('-')[0].rsplit('_', 2) - return f"{filename_parts[0]}_{filename_parts[1][:3]}" + return "{0}_{1}".format(filename_parts[0], filename_parts[1][:3]) # Example: IMDB_LCAPPS_2067P_400-80002183.SAR returns IMDB_LCAPPS_206 elif filename_base.startswith('IMDB_LCAPPS_2'): filename_parts = filename.split('-')[0].rsplit('_', 2) - return f"{filename_parts[0]}_{filename_parts[1][:3]}" + return "{0}_{1}".format(filename_parts[0], filename_parts[1][:3]) # Example: IMDB_SERVER20_067_4-80002046.SAR returns IMDB_SERVER20_06 (SPS06) elif filename_base.startswith('IMDB_SERVER'): filename_parts = filename.split('-')[0].rsplit('_', 2) - return f"{filename_parts[0]}_{filename_parts[1][:2]}" + return "{0}_{1}".format(filename_parts[0], filename_parts[1][:2]) # Example: SAPEXE_100-80005374.SAR returns SAPEXE_100 elif filename_base.startswith('SAPEXE'): @@ -294,7 +296,7 @@ def _get_next_page_query(desc): # Extracts the next page query URL for paginated search results. if '|' not in desc: return None - _, url = desc.split('|') + _prefix, url = desc.split('|') return url.strip() diff --git a/plugins/module_utils/systems/__init__.py b/plugins/module_utils/systems/__init__.py index 67a78bd..e69de29 100644 --- a/plugins/module_utils/systems/__init__.py +++ b/plugins/module_utils/systems/__init__.py @@ -1 +0,0 @@ -# This file makes the `systems` directory into a Python package. \ No newline at end of file diff --git a/plugins/module_utils/systems/api.py b/plugins/module_utils/systems/api.py index 0de1dae..a00fef1 100644 --- a/plugins/module_utils/systems/api.py +++ b/plugins/module_utils/systems/api.py @@ -1,8 +1,11 @@ +from __future__ import absolute_import, division, print_function +__metaclass__ = type + import json import time +from functools import wraps from urllib.parse import urljoin -from requests.exceptions import HTTPError from .. import constants as C from .. 
import exceptions @@ -12,35 +15,37 @@ class InstallationNotFoundError(Exception): def __init__(self, installation_nr, available_installations): self.installation_nr = installation_nr self.available_installations = available_installations - super().__init__(f"Installation number '{installation_nr}' not found. Available installations: {available_installations}") + super(InstallationNotFoundError, self).__init__( + "Installation number '{0}' not found. Available installations: {1}".format(installation_nr, available_installations) + ) class SystemNotFoundError(Exception): def __init__(self, system_nr, details): self.system_nr = system_nr self.details = details - super().__init__(f"System with number '{system_nr}' not found. Details: {details}") + super(SystemNotFoundError, self).__init__("System with number '{0}' not found. Details: {1}".format(system_nr, details)) class ProductNotFoundError(Exception): def __init__(self, product, available_products): self.product = product self.available_products = available_products - super().__init__(f"Product '{product}' not found. Available products: {available_products}") + super(ProductNotFoundError, self).__init__("Product '{0}' not found. Available products: {1}".format(product, available_products)) class VersionNotFoundError(Exception): def __init__(self, version, available_versions): self.version = version self.available_versions = available_versions - super().__init__(f"Version '{version}' not found. Available versions: {available_versions}") + super(VersionNotFoundError, self).__init__("Version '{0}' not found. Available versions: {1}".format(version, available_versions)) class LicenseTypeInvalidError(Exception): def __init__(self, license_type, available_license_types): self.license_type = license_type self.available_license_types = available_license_types - super().__init__(f"License type '{license_type}' is invalid. Available types: {available_license_types}") + super(LicenseTypeInvalidError, self).__init__("License type '{0}' is invalid. Available types: {1}".format(license_type, available_license_types)) class DataInvalidError(Exception): @@ -49,18 +54,42 @@ def __init__(self, scope, unknown_fields, missing_required_fields, fields_with_i self.unknown_fields = unknown_fields self.missing_required_fields = missing_required_fields self.fields_with_invalid_option = fields_with_invalid_option - super().__init__(f"Invalid data for {scope}: Unknown fields: {unknown_fields}, Missing required fields: {missing_required_fields}, Invalid options: {fields_with_invalid_option}") + message = ("Invalid data for {0}: Unknown fields: {1}, Missing required fields: {2}, " + "Invalid options: {3}".format(scope, unknown_fields, missing_required_fields, + fields_with_invalid_option)) + super(DataInvalidError, self).__init__(message) + + +try: + from requests.exceptions import HTTPError +except ImportError: + HAS_REQUESTS = False + HTTPError = None +else: + HAS_REQUESTS = True + + +def require_requests(func): + # A decorator to check for the 'requests' library before executing a function. + @wraps(func) + def wrapper(*args, **kwargs): + if not HAS_REQUESTS: + raise ImportError("The 'requests' library is required but was not found.") + return func(*args, **kwargs) + return wrapper +@require_requests def get_systems(client, filter_str): # Retrieves a list of systems based on an OData filter string. 
- query_path = f"Systems?$filter={filter_str}" + query_path = "Systems?$filter={0}".format(filter_str) return client.get(_url(query_path), headers=_headers({})).json()['d']['results'] +@require_requests def get_system(client, system_nr, installation_nr, username): # Retrieves details for a single, specific system. - filter_str = f"Uname eq '{username}' and Insnr eq '{installation_nr}' and Sysnr eq '{system_nr}'" + filter_str = "Uname eq '{0}' and Insnr eq '{1}' and Sysnr eq '{2}'".format(username, installation_nr, system_nr) try: systems = get_systems(client, filter_str) except HTTPError as err: @@ -76,14 +105,17 @@ def get_system(client, system_nr, installation_nr, username): system = systems[0] if 'Prodver' not in system and 'Version' not in system: - raise exceptions.SapLaunchpadError(f"System {system_nr} was found, but it is missing a required Product Version ID (checked for 'Prodver' and 'Version' keys). System details: {system}") + message = ("System {0} was found, but it is missing a required Product Version ID " + "(checked for 'Prodver' and 'Version' keys). System details: {1}".format(system_nr, system)) + raise exceptions.SapLaunchpadError(message) return system +@require_requests def get_product_id(client, product_name, installation_nr, username): # Finds the internal product ID for a given product name. - query_path = f"SysProducts?$filter=Uname eq '{username}' and Insnr eq '{installation_nr}' and Sysnr eq '' and Nocheck eq ''" + query_path = "SysProducts?$filter=Uname eq '{0}' and Insnr eq '{1}' and Sysnr eq '' and Nocheck eq ''".format(username, installation_nr) products = client.get(_url(query_path), headers=_headers({})).json()['d']['results'] product = next((p for p in products if p['Description'] == product_name), None) if product is None: @@ -91,9 +123,10 @@ def get_product_id(client, product_name, installation_nr, username): return product['Product'] +@require_requests def get_version_id(client, version_name, product_id, installation_nr, username): # Finds the internal version ID for a given product version name. - query_path = f"SysVersions?$filter=Uname eq '{username}' and Insnr eq '{installation_nr}' and Product eq '{product_id}' and Nocheck eq ''" + query_path = "SysVersions?$filter=Uname eq '{0}' and Insnr eq '{1}' and Product eq '{2}' and Nocheck eq ''".format(username, installation_nr, product_id) versions = client.get(_url(query_path), headers=_headers({})).json()['d']['results'] version = next((v for v in versions if v['Description'] == version_name), None) if version is None: @@ -101,17 +134,19 @@ def get_version_id(client, version_name, product_id, installation_nr, username): return version['Version'] +@require_requests def validate_installation(client, installation_nr, username): # Checks if the user has access to the specified installation number. - query_path = f"Installations?$filter=Ubname eq '{username}' and ValidateOnly eq ''" + query_path = "Installations?$filter=Ubname eq '{0}' and ValidateOnly eq ''".format(username) installations = client.get(_url(query_path), headers=_headers({})).json()['d']['results'] if not any(i['Insnr'] == installation_nr for i in installations): raise InstallationNotFoundError(installation_nr, [i['Insnr'] for i in installations]) +@require_requests def validate_system_data(client, data, version_id, system_nr, installation_nr, username): # Validates user-provided system data against the fields supported by the API for a given product version. 
- query_path = f"SystData?$filter=Pvnr eq '{version_id}' and Insnr eq '{installation_nr}'" + query_path = "SystData?$filter=Pvnr eq '{0}' and Insnr eq '{1}'".format(version_id, installation_nr) results = client.get(_url(query_path), headers=_headers({})).json()['d']['results'][0] possible_fields = json.loads(results['Output']) final_fields = _validate_user_data_against_supported_fields("system", data, possible_fields) @@ -121,7 +156,7 @@ def validate_system_data(client, data, version_id, system_nr, installation_nr, u final_fields['Uname'] = username final_fields['Sysnr'] = system_nr final_fields_for_check = [{"name": k, "value": v} for k, v in final_fields.items()] - query_path = f"SystemDataCheck?$filter=Nocheck eq '' and Data eq '{json.dumps(final_fields_for_check)}'" + query_path = "SystemDataCheck?$filter=Nocheck eq '' and Data eq '{0}'".format(json.dumps(final_fields_for_check)) results = client.get(_url(query_path), headers=_headers({})).json()['d']['results'] warning = None @@ -132,9 +167,10 @@ def validate_system_data(client, data, version_id, system_nr, installation_nr, u return final_fields_lower, warning +@require_requests def validate_licenses(client, licenses, version_id, installation_nr, username): # Validates user-provided license data against the license types and fields supported by the API. - query_path = f"LicenseType?$filter=PRODUCT eq '{version_id}' and INSNR eq '{installation_nr}' and Uname eq '{username}' and Nocheck eq 'X'" + query_path = "LicenseType?$filter=PRODUCT eq '{0}' and INSNR eq '{1}' and Uname eq '{2}' and Nocheck eq 'X'".format(version_id, installation_nr, username) results = client.get(_url(query_path), headers=_headers({})).json()['d']['results'] available_license_types = {r["LICENSETYPE"] for r in results} license_data = [] @@ -144,7 +180,7 @@ def validate_licenses(client, licenses, version_id, installation_nr, username): if result is None: raise LicenseTypeInvalidError(lic['type'], available_license_types) - final_fields = _validate_user_data_against_supported_fields(f'license {lic["type"]}', lic['data'], json.loads(result["Selfields"])) + final_fields = _validate_user_data_against_supported_fields('license {0}'.format(lic["type"]), lic['data'], json.loads(result["Selfields"])) final_fields = {k.upper(): v for k, v in final_fields.items()} final_fields["LICENSETYPE"] = result['PRODID'] final_fields["LICENSETYPETEXT"] = result['LICENSETYPE'] @@ -152,11 +188,12 @@ def validate_licenses(client, licenses, version_id, installation_nr, username): return license_data +@require_requests def get_existing_licenses(client, system_nr, username): # Retrieves all existing license keys for a given system. # When updating the licenses based on the results here, the backend expects a completely different format. # This function transforms the response to the format the backend expects for subsequent update calls. - query_path = f"LicenseKeys?$filter=Uname eq '{username}' and Sysnr eq '{system_nr}'" + query_path = "LicenseKeys?$filter=Uname eq '{0}' and Sysnr eq '{1}'".format(username, system_nr) results = client.get(_url(query_path), headers=_headers({})).json()['d']['results'] return [ { @@ -168,6 +205,7 @@ def get_existing_licenses(client, system_nr, username): ] +@require_requests def generate_licenses(client, license_data, existing_licenses, version_id, installation_nr, username): # Generates new license keys for a system. 
body = { @@ -183,6 +221,7 @@ def generate_licenses(client, license_data, existing_licenses, version_id, insta return json.loads(response['d']['Result']) +@require_requests def submit_system(client, is_new, system_data, generated_licenses, username): # Submits all system and license data to create or update a system. # The SAP Backend requires a completely different format for the license data (`matdata`) @@ -206,17 +245,20 @@ def submit_system(client, is_new, system_data, generated_licenses, username): licdata = json.loads(response['d']['licdata']) if not licdata: raise exceptions.SapLaunchpadError( - "The API call to submit the system was successful, but the response did not contain the expected system number. " - f"The 'licdata' field in the API response was empty: {response['d']['licdata']}" + "The API call to submit the system was successful, but the response did not contain the expected system number. " + + "The 'licdata' field in the API response was empty: {0}".format(response['d']['licdata']) ) return licdata[0]['VALUE'] +@require_requests def get_license_key_numbers(client, license_data, system_nr, username): # Retrieves the unique key numbers for a list of recently created licenses. key_nrs = [] for lic in license_data: - query_path = f"LicenseKeys?$filter=Uname eq '{username}' and Sysnr eq '{system_nr}' and Prodid eq '{lic['LICENSETYPE']}' and Hwkey eq '{lic['HWKEY']}'" + query_path_template = ("LicenseKeys?$filter=Uname eq '{0}' and Sysnr eq '{1}' and " + "Prodid eq '{2}' and Hwkey eq '{3}'") + query_path = query_path_template.format(username, system_nr, lic['LICENSETYPE'], lic['HWKEY']) # Retry logic to handle potential replication delay in the backend API after a license is submitted. for attempt in range(9): @@ -229,19 +271,22 @@ def get_license_key_numbers(client, license_data, system_nr, username): time.sleep(10) # Wait 10 seconds before retrying else: # This 'else' belongs to the 'for' loop, it runs if the loop completes without a 'break' raise exceptions.SapLaunchpadError( - f"Could not find license key number for license type '{lic['LICENSETYPE']}' and HW key '{lic['HWKEY']}' " - f"on system '{system_nr}' after submitting the changes. There might be a replication delay in the SAP backend." + ("Could not find license key number for license type '{0}' and HW key '{1}' " + "on system '{2}' after submitting the changes. There might be a replication delay in the SAP backend.") + .format(lic['LICENSETYPE'], lic['HWKEY'], system_nr) ) return key_nrs +@require_requests def download_licenses(client, key_nrs): # Downloads the license key file content for a list of key numbers. keys_json = json.dumps([{"Keynr": key_nr} for key_nr in key_nrs]) - return client.get(_url(f"FileContent(Keynr='{keys_json}')/$value")).content + return client.get(_url("FileContent(Keynr='{0}')/$value".format(keys_json))).content +@require_requests def delete_licenses(client, licenses_to_delete, existing_licenses, version_id, installation_nr, username): # Deletes a list of specified licenses from a system. body = { @@ -259,14 +304,17 @@ def delete_licenses(client, licenses_to_delete, existing_licenses, version_id, i def _url(query_path): # Helper to construct the full URL for the systems provisioning service. - return f'{C.URL_SYSTEMS_PROVISIONING}/{query_path}' + return '{0}/{1}'.format(C.URL_SYSTEMS_PROVISIONING, query_path) def _headers(additional_headers): # Helper to construct standard request headers. 
-    return {**{'Accept': 'application/json'}, **additional_headers}
+    headers = {'Accept': 'application/json'}
+    headers.update(additional_headers)
+    return headers


+@require_requests
 def _get_csrf_token(client):
     # Fetches the CSRF token required for POST/write operations.
     # Add Origin and a more specific Referer header, as the service may require them to issue a CSRF token.
@@ -318,4 +366,4 @@ def _validate_user_data_against_supported_fields(scope, user_data, possible_fiel
     if len(unknown_fields) > 0 or len(missing_required_fields) > 0 or len(fields_with_invalid_option) > 0:
         raise DataInvalidError(scope, unknown_fields, missing_required_fields, fields_with_invalid_option)

-    return final_fields
\ No newline at end of file
+    return final_fields
diff --git a/plugins/module_utils/systems/main.py b/plugins/module_utils/systems/main.py
index 0060d8a..ecd2216 100644
--- a/plugins/module_utils/systems/main.py
+++ b/plugins/module_utils/systems/main.py
@@ -1,6 +1,7 @@
-import os
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
 import pathlib
-from requests.exceptions import HTTPError

 from .. import auth, exceptions
 from ..client import ApiClient
@@ -10,10 +11,21 @@
 def run_systems_info(params):
     # Main runner function for the systems_info module.
     result = {'changed': False, 'failed': False, 'systems': []}
-    client = ApiClient()
+    try:
+        client = ApiClient()
         auth.login(client, params['suser_id'], params['suser_password'])
         result['systems'] = api.get_systems(client, params['filter'])
+    except ImportError as e:
+        result['failed'] = True
+        if 'requests' in str(e):
+            result['missing_dependency'] = 'requests'
+        elif 'urllib3' in str(e):
+            result['missing_dependency'] = 'urllib3'
+        elif 'beautifulsoup4' in str(e):
+            result['missing_dependency'] = 'beautifulsoup4'
+        else:
+            result['msg'] = "An unexpected import error occurred: {0}".format(e)
     except (exceptions.SapLaunchpadError, api.SystemNotFoundError) as e:
         result['failed'] = True
         result['msg'] = str(e)
@@ -23,14 +35,15 @@
 def run_license_keys(params):
     # Main runner function for the license_keys module.
     result = {'changed': False, 'failed': False, 'warnings': []}
-    client = ApiClient()
-    username = params['suser_id']
-    password = params['suser_password']
-    installation_nr = params['installation_nr']
-    system_nr = params['system_nr']
-    state = params['state']

     try:
+        client = ApiClient()
+        username = params['suser_id']
+        password = params['suser_password']
+        installation_nr = params['installation_nr']
+        system_nr = params['system_nr']
+        state = params['state']
+
         auth.login(client, username, password)

         api.validate_installation(client, installation_nr, username)
@@ -43,14 +56,15 @@ def run_license_keys(params):
             existing_systems = api.get_systems(client, filter_str)
             if len(existing_systems) == 1:
                 system_nr = existing_systems[0]['Sysnr']
-                result['warnings'].append(f"A system with SID '{sid}' already exists. Using system number {system_nr} for update.")
+                result['warnings'].append("A system with SID '{0}' already exists. Using system number {1} for update.".format(sid, system_nr))
             elif len(existing_systems) > 1:
                 # Ambiguous situation: multiple systems with the same SID.
                 # Force user to provide system_nr to select one.
                 system_nrs_found = [s['Sysnr'] for s in existing_systems]
                 result['failed'] = True
-                result['msg'] = (f"Multiple systems with SID '{sid}' found under installation '{installation_nr}': "
-                                 f"{', '.join(system_nrs_found)}. Please provide a specific 'system_nr' to select which system to update.")
+                msg_template = ("Multiple systems with SID '{0}' found under installation '{1}': {2}. "
+                                "Please provide a specific 'system_nr' to select which system to update.")
+                result['msg'] = msg_template.format(sid, installation_nr, ', '.join(system_nrs_found))
                 return result

         is_new_system = not system_nr
@@ -73,7 +87,7 @@
                 result['changed'] = True
                 result['system_nr'] = system_nr
-                result['msg'] = f"System {system_nr} created successfully."
+                result['msg'] = "System {0} created successfully.".format(system_nr)

         else:
             # Existing system
             system = api.get_system(client, system_nr, installation_nr, username)
@@ -81,18 +95,18 @@
             # We check for 'Version' first, then fall back to 'Prodver' for compatibility.
             version_id = system.get('Version') or system.get('Prodver')
             if not version_id:
-                raise exceptions.SapLaunchpadError(f"System {system_nr} is missing a required Product Version ID.")
+                raise exceptions.SapLaunchpadError("System {0} is missing a required Product Version ID.".format(system_nr))
             existing_licenses = api.get_existing_licenses(client, system_nr, username)

             # The API requires a sysdata payload even for an edit operation.
             # It must contain at least the installation number, system number, product version, and system ID.
             sysid = system.get('sysid')
             if not sysid:
-                raise exceptions.SapLaunchpadError(f"System {system_nr} is missing a required System ID ('sysid').")
+                raise exceptions.SapLaunchpadError("System {0} is missing a required System ID ('sysid').".format(system_nr))
             systype = system.get('systype')
             if not systype:
-                raise exceptions.SapLaunchpadError(f"System {system_nr} is missing a required System Type ('systype').")
+                raise exceptions.SapLaunchpadError("System {0} is missing a required System Type ('systype').".format(system_nr))

             sysdata_for_edit = [
                 {"name": "insnr", "value": installation_nr},
@@ -109,7 +123,10 @@
                 return result

             license_data = api.validate_licenses(client, user_licenses, version_id, installation_nr, username)
-            new_or_changed = [l for l in license_data if not any(l['HWKEY'] == el['HWKEY'] and l['LICENSETYPE'] == el['LICENSETYPE'] for el in existing_licenses)]
+            new_or_changed = [
+                l for l in license_data
+                if not any(l['HWKEY'] == el['HWKEY'] and l['LICENSETYPE'] == el['LICENSETYPE'] for el in existing_licenses)
+            ]

             if not new_or_changed:
                 result['msg'] = "System and licenses are already in the desired state."
@@ -118,7 +135,7 @@
                 generated = api.generate_licenses(client, new_or_changed, existing_licenses, version_id, installation_nr, username)
                 api.submit_system(client, False, sysdata_for_edit, generated, username)
                 result['changed'] = True
-                result['msg'] = f"System {system_nr} licenses updated successfully."
+                result['msg'] = "System {0} licenses updated successfully.".format(system_nr)

         elif state == 'absent':
             user_licenses_to_keep = params.get('licenses', [])
@@ -126,7 +143,11 @@
                 licenses_to_delete = existing_licenses
             else:
                 validated_to_keep = api.validate_licenses(client, user_licenses_to_keep, version_id, installation_nr, username)
-                key_nrs_to_keep = [l['KEYNR'] for l in existing_licenses if any(k['HWKEY'] == l['HWKEY'] and k['LICENSETYPE'] == l['LICENSETYPE'] for k in validated_to_keep)]
+                key_nrs_to_keep = [
+                    l['KEYNR'] for l in existing_licenses if any(
+                        k['HWKEY'] == l['HWKEY'] and k['LICENSETYPE'] == l['LICENSETYPE'] for k in validated_to_keep
+                    )
+                ]
                 licenses_to_delete = [l for l in existing_licenses if l['KEYNR'] not in key_nrs_to_keep]

             if not licenses_to_delete:
@@ -136,7 +157,7 @@
             deleted_licenses = api.delete_licenses(client, licenses_to_delete, existing_licenses, version_id, installation_nr, username)
             api.submit_system(client, False, sysdata_for_edit, deleted_licenses, username)
             result['changed'] = True
-            result['msg'] = f"Successfully deleted licenses from system {system_nr}."
+            result['msg'] = "Successfully deleted licenses from system {0}.".format(system_nr)

         # Download/return license file content if applicable
         if state == 'present':
@@ -153,20 +174,31 @@
             dest_path = pathlib.Path(params['download_path'])
             if not dest_path.is_dir():
                 result['failed'] = True
-                result['msg'] = f"Destination for license file does not exist or is not a directory: {dest_path}"
+                result['msg'] = "Destination for license file does not exist or is not a directory: {0}".format(dest_path)
                 return result

-            output_file = dest_path / f"{system_nr}_licenses.txt"
+            output_file = dest_path / "{0}_licenses.txt".format(system_nr)
             try:
                 with open(output_file, 'w', encoding='utf-8') as f:
                     f.write(content_str)
                 current_msg = result.get('msg', '')
-                download_msg = f"License file downloaded to {output_file}."
-                result['msg'] = f"{current_msg} {download_msg}".strip()
+                download_msg = "License file downloaded to {0}.".format(output_file)
+                result['msg'] = "{0} {1}".format(current_msg, download_msg).strip()
             except IOError as e:
                 result['failed'] = True
-                result['msg'] = f"Failed to write license file: {e}"
+                result['msg'] = "Failed to write license file: {0}".format(e)
+
+    except ImportError as e:
+        result['failed'] = True
+        if 'requests' in str(e):
+            result['missing_dependency'] = 'requests'
+        elif 'urllib3' in str(e):
+            result['missing_dependency'] = 'urllib3'
+        elif 'beautifulsoup4' in str(e):
+            result['missing_dependency'] = 'beautifulsoup4'
+        else:
+            result['msg'] = "An unexpected import error occurred: {0}".format(e)

     except (exceptions.SapLaunchpadError,
             api.InstallationNotFoundError,
@@ -180,6 +212,6 @@
         result['msg'] = str(e)
     except Exception as e:
         result['failed'] = True
-        result['msg'] = f"An unexpected error occurred: {type(e).__name__} - {e}"
+        result['msg'] = "An unexpected error occurred: {0} - {1}".format(type(e).__name__, e)

-    return result
\ No newline at end of file
+    return result
diff --git a/plugins/modules/license_keys.py b/plugins/modules/license_keys.py
index 344b976..25a4af3 100644
--- a/plugins/modules/license_keys.py
+++ b/plugins/modules/license_keys.py
@@ -12,7 +12,7 @@
 description:
   - This ansible module creates and updates systems and their license keys using the Launchpad API.
-  - It is closely modeled after the interactions in the portal U(https://me.sap.com/licensekey):
+  - It is closely modeled after the interactions in the portal U(https://me.sap.com/licensekey)
   - First, a SAP system is defined by its SID, product, version and other data.
   - Then, for this system, license keys are defined by license type, HW key and potential other attributes.
   - The system and license data is then validated and submitted to the Launchpad API and the license key files returned to the caller.
@@ -31,9 +31,8 @@
       - SAP S-User Password.
     required: true
     type: str
-    no_log: true
   installation_nr:
-    description: 
+    description:
       - Number of the Installation for which the system should be created/updated
     required: true
     type: str
@@ -59,7 +58,7 @@
         required: true
         type: str
       data:
-        description: 
+        description:
          - The data attributes of the system. The possible attributes are defined by product and version.
          - Running the module without any data attributes will return in the error message which attributes are supported/required.
        required: true
@@ -80,13 +79,13 @@
        required: true
        type: str
      data:
-        description: 
+        description:
          - The data attributes of the licenses. The possible attributes are defined by product and version.
          - Running the module without any data attributes will return in the error message which attributes are supported/required
          - In practice, most license types require at least a hardware key (hwkey) and expiry date (expdate)
        required: true
        type: dict
-
+
  delete_other_licenses:
    description:
      - Whether licenses other than the ones specified in the licenses attributes should be deleted.
@@ -100,7 +99,8 @@
    type: path

author:
-    - Lab for SAP Solutions
+    - Matthias Winzeler (@MatthiasWinzeler)
+    - Marcel Mamula (@marcelmamula)
'''

@@ -169,13 +169,13 @@
        SWPRODUCTLIMIT=2147483647
        SYSTEM-NR=00000000023456789
 system_nr:
-    description: The number of the system which was created/updated. 
+    description: The number of the system which was created/updated.
    returned: on success
    type: str
    sample: "0000123456"
'''

-from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.basic import AnsibleModule, missing_required_lib

 from ..module_utils.systems import main as systems_runner

@@ -231,6 +231,8 @@ def run_module():
     result = systems_runner.run_license_keys(params)

     if result.get('failed'):
+        if result.get('missing_dependency'):
+            module.fail_json(msg=missing_required_lib(result['missing_dependency']))
         module.fail_json(**result)
     else:
         module.exit_json(**result)
diff --git a/plugins/modules/maintenance_planner_files.py b/plugins/modules/maintenance_planner_files.py
index a2bd3f4..58460f1 100644
--- a/plugins/modules/maintenance_planner_files.py
+++ b/plugins/modules/maintenance_planner_files.py
@@ -2,6 +2,8 @@

 from __future__ import absolute_import, division, print_function

+__metaclass__ = type
+
 DOCUMENTATION = r'''
 ---
 module: maintenance_planner_files
@@ -31,8 +33,14 @@
       - Transaction Name or Transaction Display ID from Maintenance Planner.
     required: true
     type: str
+  validate_url:
+    description:
+      - Validates if the download URLs are accessible before returning them.
+    type: bool
+    default: false

 author:
-    - SAP LinuxLab
+    - Matthias Winzeler (@MatthiasWinzeler)
+    - Marcel Mamula (@marcelmamula)
 '''

@@ -70,8 +78,7 @@
     sample: "SAPCAR_1324-80000936.EXE"
 '''

-import requests
-from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.basic import AnsibleModule, missing_required_lib

 from ..module_utils.maintenance_planner import main as maintenance_planner_runner

@@ -104,6 +111,8 @@
     # The runner function indicates failure via a key in the result.
     if result.get('failed'):
+        if result.get('missing_dependency'):
+            module.fail_json(msg=missing_required_lib(result['missing_dependency']))
         module.fail_json(**result)
     else:
         module.exit_json(**result)
diff --git a/plugins/modules/maintenance_planner_stack_xml_download.py b/plugins/modules/maintenance_planner_stack_xml_download.py
index 4473078..dd54760 100644
--- a/plugins/modules/maintenance_planner_stack_xml_download.py
+++ b/plugins/modules/maintenance_planner_stack_xml_download.py
@@ -2,6 +2,8 @@

 from __future__ import absolute_import, division, print_function

+__metaclass__ = type
+
 DOCUMENTATION = r'''
 ---
 module: maintenance_planner_stack_xml_download
@@ -37,7 +39,9 @@
     required: true
     type: str
 author:
-    - SAP LinuxLab
+    - Matthias Winzeler (@MatthiasWinzeler)
+    - Sean Freeman (@sean-freeman)
+    - Marcel Mamula (@marcelmamula)
 '''

@@ -62,8 +66,7 @@
     sample: "SAP Maintenance Planner Stack XML successfully downloaded to /tmp/MP_STACK_20211015_044854.xml"
 '''

-import requests
-from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.basic import AnsibleModule, missing_required_lib

 from ..module_utils.maintenance_planner import main as maintenance_planner_runner

@@ -97,6 +100,8 @@
     # The runner function indicates failure via a key in the result.
     if result.get('failed'):
+        if result.get('missing_dependency'):
+            module.fail_json(msg=missing_required_lib(result['missing_dependency']))
         module.fail_json(**result)
     else:
         module.exit_json(**result)
diff --git a/plugins/modules/software_center_download.py b/plugins/modules/software_center_download.py
index c0bb800..7268659 100644
--- a/plugins/modules/software_center_download.py
+++ b/plugins/modules/software_center_download.py
@@ -2,6 +2,8 @@

 from __future__ import absolute_import, division, print_function

+__metaclass__ = type
+
 DOCUMENTATION = r'''
 ---
 module: software_center_download
@@ -32,24 +34,25 @@
     description:
       - "Deprecated. Use 'search_query' instead."
     required: false
+    default: ''
     type: str
-    deprecated:
-      alternative: search_query
-      removed_in: "1.2.0"
   search_query:
     description:
       - Filename of the SAP software to download.
     required: false
+    default: ''
     type: str
   download_link:
     description:
      - Direct download link to the SAP software.
     required: false
+    default: ''
     type: str
   download_filename:
     description:
      - Download filename of the SAP software.
     required: false
+    default: ''
     type: str
   dest:
     description:
@@ -58,27 +61,35 @@
     type: str
   deduplicate:
     description:
-      - "Specifies how to handle multiple search results for the same filename. Choices are `first` (oldest) or `last` (newest)."
-    choices: [ 'first', 'last' ]
+      - Specifies how to handle multiple search results for the same filename.
+      - Choices are `first` (oldest) or `last` (newest).
+    choices: [ 'first', 'last', '' ]
     required: false
+    default: ''
     type: str
   search_alternatives:
     description:
      - Enable search for alternative packages, when filename is not available.
     required: false
+    default: false
     type: bool
   dry_run:
     description:
      - Check availability of SAP Software without downloading.
     required: false
+    default: false
     type: bool
   validate_checksum:
     description:
-      - If a file with the same name already exists at the destination, validate its checksum against the remote file. If the checksum is invalid, the local file will be removed and re-downloaded.
+      - If a file with the same name already exists at the destination, validate its checksum against the remote file.
+      - If the checksum is invalid, the local file will be removed and re-downloaded.
     required: false
+    default: false
     type: bool

 author:
-    - SAP LinuxLab
+    - Matthias Winzeler (@MatthiasWinzeler)
+    - Sean Freeman (@sean-freeman)
+    - Marcel Mamula (@marcelmamula)
 '''

@@ -132,7 +143,7 @@
     type: bool
 '''

-from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.basic import AnsibleModule, missing_required_lib

 from ..module_utils.software_center import main as software_center_runner

@@ -170,6 +181,8 @@
     # The runner function indicates failure via a key in the result.
     if result.get('failed'):
+        if result.get('missing_dependency'):
+            module.fail_json(msg=missing_required_lib(result['missing_dependency']))
         module.fail_json(**result)
     else:
         module.exit_json(**result)
diff --git a/plugins/modules/systems_info.py b/plugins/modules/systems_info.py
index be25d5f..2817938 100644
--- a/plugins/modules/systems_info.py
+++ b/plugins/modules/systems_info.py
@@ -2,6 +2,8 @@

 from __future__ import absolute_import, division, print_function

+__metaclass__ = type
+
 DOCUMENTATION = r'''
 ---
 module: systems_info
@@ -25,14 +27,14 @@
       - SAP S-User Password.
     required: true
     type: str
-    no_log: true
   filter:
     description:
      - An ODATA filter expression to query the systems.
     required: true
     type: str

 author:
-    - SAP LinuxLab
+    - Matthias Winzeler (@MatthiasWinzeler)
+    - Marcel Mamula (@marcelmamula)
 '''

@@ -67,7 +69,7 @@
         Version: "73554900100800000266"
 '''

-from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.basic import AnsibleModule, missing_required_lib

 from ..module_utils.systems import main as systems_runner

@@ -86,6 +88,8 @@
     result = systems_runner.run_systems_info(module.params)

     if result.get('failed'):
+        if result.get('missing_dependency'):
+            module.fail_json(msg=missing_required_lib(result['missing_dependency']))
         module.fail_json(**result)
     else:
         module.exit_json(**result)
diff --git a/tests/.gitkeep b/tests/.gitkeep
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/sanity/ignore-2.14.txt b/tests/sanity/ignore-2.14.txt
new file mode 100644
index 0000000..ce5c04e
--- /dev/null
+++ b/tests/sanity/ignore-2.14.txt
@@ -0,0 +1,5 @@
+plugins/modules/license_keys.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_files.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_stack_xml_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/software_center_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/systems_info.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.15.txt b/tests/sanity/ignore-2.15.txt
new file mode 100644
index 0000000..ce5c04e
--- /dev/null
+++ b/tests/sanity/ignore-2.15.txt
@@ -0,0 +1,5 @@
+plugins/modules/license_keys.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_files.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_stack_xml_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/software_center_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/systems_info.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.16.txt b/tests/sanity/ignore-2.16.txt
new file mode 100644
index 0000000..ce5c04e
--- /dev/null
+++ b/tests/sanity/ignore-2.16.txt
@@ -0,0 +1,5 @@
+plugins/modules/license_keys.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_files.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_stack_xml_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/software_center_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/systems_info.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.17.txt b/tests/sanity/ignore-2.17.txt
new file mode 100644
index 0000000..ce5c04e
--- /dev/null
+++ b/tests/sanity/ignore-2.17.txt
@@ -0,0 +1,5 @@
+plugins/modules/license_keys.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_files.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_stack_xml_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/software_center_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/systems_info.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.18.txt b/tests/sanity/ignore-2.18.txt
new file mode 100644
index 0000000..ce5c04e
--- /dev/null
+++ b/tests/sanity/ignore-2.18.txt
@@ -0,0 +1,5 @@
+plugins/modules/license_keys.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_files.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_stack_xml_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/software_center_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/systems_info.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.19.txt b/tests/sanity/ignore-2.19.txt
new file mode 100644
index 0000000..ce5c04e
--- /dev/null
+++ b/tests/sanity/ignore-2.19.txt
@@ -0,0 +1,5 @@
+plugins/modules/license_keys.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_files.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_stack_xml_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/software_center_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/systems_info.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
\ No newline at end of file
diff --git a/tests/sanity/ignore-2.20.txt b/tests/sanity/ignore-2.20.txt
new file mode 100644
index 0000000..ce5c04e
--- /dev/null
+++ b/tests/sanity/ignore-2.20.txt
@@ -0,0 +1,5 @@
+plugins/modules/license_keys.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_files.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/maintenance_planner_stack_xml_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/software_center_download.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
+plugins/modules/systems_info.py validate-modules:missing-gplv3-license # Licensed under Apache 2.0
\ No newline at end of file