diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c5463ecdc..5ffd9b206 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -28,8 +28,12 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - if [ -f requirements-to-build.txt ]; then pip install -r requirements-to-build.txt; fi + python -m pip install poetry + poetry install + - name: Build package + run: | + poetry build + continue-on-error: false # Linting is done in the run_linters.sh script - name: Prep tests @@ -49,9 +53,10 @@ jobs: - name: Run linters working-directory: . run: | + python -m pip install ruff pylint flake8 chmod +x conf/run_linters.sh conf/run_linters.sh - #- name: Cache SonarQube packages + # - name: Cache SonarQube packages # uses: actions/cache@v4 # with: # path: ./.sonar diff --git a/cli/projects_cli.py b/cli/projects_cli.py index 4b275e545..5ab525861 100644 --- a/cli/projects_cli.py +++ b/cli/projects_cli.py @@ -24,7 +24,6 @@ """ -import sys import json from requests import RequestException diff --git a/conf/prep_all_tests.sh b/conf/prep_all_tests.sh index e0fe7876d..3f4712b6e 100755 --- a/conf/prep_all_tests.sh +++ b/conf/prep_all_tests.sh @@ -38,12 +38,14 @@ function create_fresh_project { fi curl -X POST -u "${usertoken}:" "${url}/api/projects/delete?project=${key}" conf/run_scanner.sh "${opts[@]}" -Dsonar.projectKey="${key}" -Dsonar.projectName="${key}" -Dsonar.host.url="${url}" "${opt_token}" "${opt_org}" + conf/run_scanner.sh "${opts[@]}" -Dsonar.projectKey="${key}" -Dsonar.projectName="${key}" -Dsonar.host.url="${url}" "${opt_token}" "${opt_org}" -Dsonar.branch.name=develop + conf/run_scanner.sh "${opts[@]}" -Dsonar.projectKey="${key}" -Dsonar.projectName="${key}" -Dsonar.host.url="${url}" "${opt_token}" "${opt_org}" -Dsonar.branch.name=release-3.x return 0 } conf/run_linters.sh -create_fresh_project "${SYNC_PROJECT_KEY}" 
"${SONAR_HOST_URL_TEST:?}" "${SONAR_TOKEN_TEST_ADMIN_USER}" "${SONAR_TOKEN_TEST_ADMIN_ANALYSIS}" -nolint +create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_TEST:?}" "${SONAR_TOKEN_TEST_ADMIN_USER}" "${SONAR_TOKEN_TEST_ADMIN_ANALYSIS}" create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_LATEST:?}" "${SONAR_TOKEN_LATEST_ADMIN_USER}" "${SONAR_TOKEN_LATEST_ADMIN_ANALYSIS}" create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_CB:?}" "${SONAR_TOKEN_CB_ADMIN_USER}" "${SONAR_TOKEN_CB_ADMIN_ANALYSIS}" create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_9:?}" "${SONAR_TOKEN_9_ADMIN_USER}" "${SONAR_TOKEN_9_ADMIN_ANALYSIS}" diff --git a/conf/release.sh b/conf/release.sh index c7140b3d2..972c802d1 100755 --- a/conf/release.sh +++ b/conf/release.sh @@ -50,5 +50,5 @@ if [[ "${confirm}" = "y" ]]; then cd "${ROOT_DIR}" && docker pushrm olivierkorach/sonar-tools echo "Running scan" - "${CONF_DIR}/scan.sh" -test + "${CONF_DIR}/run_all.sh" -test fi \ No newline at end of file diff --git a/conf/run_linters.sh b/conf/run_linters.sh index fb00b7ff5..b134bb5b1 100755 --- a/conf/run_linters.sh +++ b/conf/run_linters.sh @@ -72,7 +72,7 @@ fi if [[ "${localbuild}" = "true" ]]; then if [[ "${linters_to_run}" == *"shellcheck"* ]]; then echo "===> Running shellcheck" - shellcheck $(find "${ROOT_DIR}" . -name '*.sh') \ + shellcheck "$(find "${ROOT_DIR}" . -name '*.sh')" \ -s bash -f json | jq | tee "${BUILD_DIR}/shellcheck-report.json" | "${CONF_DIR}"/shellcheck2sonar.py "${external_format}" > "${SHELLCHECK_REPORT}" [[ ! 
-s "${SHELLCHECK_REPORT}" ]] && rm -f "${SHELLCHECK_REPORT}" cat "${BUILD_DIR}/shellcheck-report.json" diff --git a/conf/run_tests.sh b/conf/run_tests.sh index af470a9ff..1f50fe679 100755 --- a/conf/run_tests.sh +++ b/conf/run_tests.sh @@ -45,7 +45,7 @@ do if [[ -d "${ROOT_DIR}/${GEN_LOC}/${target}/" ]]; then # Recreate a fresh TESTSYNC project for sync tests curl -X POST -u "${SONAR_TOKEN_TEST_ADMIN_USER}:" "${SONAR_HOST_URL_TEST}/api/projects/delete?project=${SYNC_PROJECT_KEY}" - conf/scan.sh -nolint -Dsonar.host.url="${SONAR_HOST_URL_TEST}" -Dsonar.projectKey="${SYNC_PROJECT_KEY}" -Dsonar.projectName="${SYNC_PROJECT_KEY}" -Dsonar.token="${SONAR_TOKEN_TEST_ADMIN_ANALYSIS}" + conf/run_scanner.sh -Dsonar.host.url="${SONAR_HOST_URL_TEST}" -Dsonar.projectKey="${SYNC_PROJECT_KEY}" -Dsonar.projectName="${SYNC_PROJECT_KEY}" -Dsonar.token="${SONAR_TOKEN_TEST_ADMIN_ANALYSIS}" # Run tests poetry run coverage run --branch --source="${ROOT_DIR}" -m pytest "${ROOT_DIR}/${GEN_LOC}/${target}/" --junit-xml="${BUILD_DIR}/xunit-results-${target}.xml" poetry run coverage xml -o "${BUILD_DIR}/coverage-${target}.xml" diff --git a/migration/build.sh b/migration/build.sh index fa94662eb..88867c2b1 100755 --- a/migration/build.sh +++ b/migration/build.sh @@ -23,20 +23,12 @@ ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" build_image=1 -release=0 -release_docker=0 while [[ $# -ne 0 ]]; do case "${1}" in nodocker) build_image=0 ;; - pypi) - release=1 - ;; - dockerhub) - release_docker=1 - ;; *) ;; esac diff --git a/poetry.lock b/poetry.lock index dd3a59496..12323ddbe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -1148,24 +1148,24 @@ files = [ [[package]] name = "setuptools" -version = "75.3.2" +version = "80.9.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "setuptools-75.3.2-py3-none-any.whl", hash = "sha256:90ab613b6583fc02d5369cbca13ea26ea0e182d1df2d943ee9cbe81d4c61add9"}, - {file = "setuptools-75.3.2.tar.gz", hash = "sha256:3c1383e1038b68556a382c1e8ded8887cd20141b0eb5708a6c8d277de49364f5"}, + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.5.2) ; sys_platform != \"cygwin\""] -core = ["importlib-metadata (>=6) ; python_version < \"3.10\"", "importlib-resources (>=5.10.2) ; python_version < \"3.9\"", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", 
"sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "ruff (<=0.7.1)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" diff --git a/sonar/app_branches.py b/sonar/app_branches.py index a3e22b77f..8d5a662e0 100644 --- a/sonar/app_branches.py +++ b/sonar/app_branches.py @@ -24,8 +24,6 @@ from typing import Optional import json -from http import HTTPStatus -from requests import RequestException from requests.utils import quote import sonar.logging as log @@ -112,11 +110,7 @@ def create(cls, app: object, name: str, project_branches: 
list[Branch]) -> Appli else: # Default main branch of project params["project"].append(obj.key) params["projectBranch"].append("") - try: - app.endpoint.post(ApplicationBranch.API[c.CREATE], params=params) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"creating branch {name} of {str(app)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(f"{str(app)} branch '{name}", e.response.text) + app.endpoint.post(ApplicationBranch.API[c.CREATE], params=params) return ApplicationBranch(app=app, name=name, project_branches=project_branches) @classmethod @@ -201,10 +195,9 @@ def update(self, name: str, project_branches: list[Branch]) -> bool: params["projectBranch"].append(br_name) try: ok = self.post(ApplicationBranch.API[c.UPDATE], params=params).ok - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"updating {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: ApplicationBranch.CACHE.pop(self) - raise exceptions.ObjectNotFound(str(self), e.response.text) + raise self.name = name self._project_branches = project_branches diff --git a/sonar/applications.py b/sonar/applications.py index 0a52518e9..0738916b8 100644 --- a/sonar/applications.py +++ b/sonar/applications.py @@ -93,11 +93,7 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Application: o = Application.CACHE.get(key, endpoint.local_url) if o: return o - try: - data = json.loads(endpoint.get(Application.API[c.GET], params={"application": key}).text)["application"] - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"searching application {key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(key, f"Application key '{key}' not found") + data = json.loads(endpoint.get(Application.API[c.GET], params={"application": key}).text)["application"] return cls.load(endpoint, data) @classmethod @@ -132,11 +128,7 @@ def 
create(cls, endpoint: pf.Platform, key: str, name: str) -> Application: :rtype: Application """ check_supported(endpoint) - try: - endpoint.post(Application.API["CREATE"], params={"key": key, "name": name}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating application {key}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(key, e.response.text) + endpoint.post(Application.API["CREATE"], params={"key": key, "name": name}) log.info("Creating object") return Application(endpoint=endpoint, key=key, name=name) @@ -151,10 +143,9 @@ def refresh(self) -> None: self.reload(json.loads(self.get("navigation/component", params={"component": self.key}).text)) self.reload(json.loads(self.get(Application.API[c.GET], params=self.api_params(c.GET)).text)["application"]) self.projects() - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"refreshing {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: Application.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") + raise def __str__(self) -> str: """String name of object""" diff --git a/sonar/branches.py b/sonar/branches.py index 2f22b6256..115e96452 100644 --- a/sonar/branches.py +++ b/sonar/branches.py @@ -24,8 +24,8 @@ from http import HTTPStatus from typing import Optional import json +import re from urllib.parse import unquote -from requests import HTTPError, RequestException import requests.utils from sonar import platform @@ -89,16 +89,11 @@ def get_object(cls, concerned_object: projects.Project, branch_name: str) -> Bra o = Branch.CACHE.get(concerned_object.key, branch_name, concerned_object.base_url()) if o: return o - try: - data = json.loads(concerned_object.get(Branch.API[c.LIST], params={"project": concerned_object.key}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"searching {str(concerned_object)} for branch 
'{branch_name}'", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(concerned_object.key, f"{str(concerned_object)} not found") - - for br in data.get("branches", []): - if br["name"] == branch_name: - return cls.load(concerned_object, branch_name, br) - raise exceptions.ObjectNotFound(branch_name, f"Branch '{branch_name}' of {str(concerned_object)} not found") + data = json.loads(concerned_object.get(Branch.API[c.LIST], params={"project": concerned_object.key}).text) + br = next((b for b in data.get("branches", []) if b["name"] == branch_name), None) + if not br: + raise exceptions.ObjectNotFound(branch_name, f"Branch '{branch_name}' of {str(concerned_object)} not found") + return cls.load(concerned_object, branch_name, br) @classmethod def load(cls, concerned_object: projects.Project, branch_name: str, data: types.ApiPayload) -> Branch: @@ -112,9 +107,11 @@ def load(cls, concerned_object: projects.Project, branch_name: str, data: types. """ branch_name = unquote(branch_name) o = Branch.CACHE.get(concerned_object.key, branch_name, concerned_object.base_url()) + br_data = next((br for br in data.get("branches", []) if br["name"] == branch_name), None) if not o: o = cls(concerned_object, branch_name) - o._load(data) + if br_data: + o._load(br_data) return o def __str__(self) -> str: @@ -135,44 +132,33 @@ def refresh(self) -> Branch: :return: itself :rtype: Branch """ - try: - data = json.loads(self.get(Branch.API[c.LIST], params=self.api_params(c.LIST)).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"refreshing {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - Branch.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found in SonarQube") - for br in data.get("branches", []): - if br["name"] == self.name: - self._load(br) - else: - # While we're there let's load other branches with up to date branch data - Branch.load(self.concerned_object, br["name"], data) + data = 
json.loads(self.get(Branch.API[c.LIST], params=self.api_params(c.LIST)).text) + br_data = next((br for br in data.get("branches", []) if br["name"] == self.name), None) + if not br_data: + Branch.CACHE.clear() + raise exceptions.ObjectNotFound(self.name, f"{str(self)} not found") + self._load(br_data) + # While we're there let's load other branches with up to date branch data + for br in [b for b in data.get("branches", []) if b["name"] != self.name]: + Branch.load(self.concerned_object, br["name"], data) return self def _load(self, data: types.ApiPayload) -> None: - if self.sq_json is None: - self.sq_json = data - else: - self.sq_json.update(data) + log.debug("Loading %s with data %s", self, data) + self.sq_json = (self.sq_json or {}) | data self._is_main = self.sq_json["isMain"] self._last_analysis = util.string_to_date(self.sq_json.get("analysisDate", None)) self._keep_when_inactive = self.sq_json.get("excludedFromPurge", False) self._is_main = self.sq_json.get("isMain", False) def is_kept_when_inactive(self) -> bool: - """ - :return: Whether the branch is kept when inactive - :rtype: bool - """ + """Returns whether the branch is kept when inactive""" if self._keep_when_inactive is None or self.sq_json is None: self.refresh() return self._keep_when_inactive def is_main(self) -> bool: - """ - :return: Whether the branch is the project main branch - :rtype: bool - """ + """Returns whether the branch is the project main branch""" if self._is_main is None or self.sq_json is None: self.refresh() return self._is_main @@ -186,11 +172,32 @@ def delete(self) -> bool: """ try: return super().delete() - except (ConnectionError, RequestException) as e: - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.BAD_REQUEST: - log.warning("Can't delete %s, it's the main branch", str(self)) + except exceptions.SonarException as e: + log.warning(e.message) return False + def get( + self, api: str, params: types.ApiParams = None, data: Optional[str] = None, mute: 
tuple[HTTPStatus] = (), **kwargs: str + ) -> requests.Response: + """Performs an HTTP GET request for the object""" + try: + return super().get(api=api, params=params, data=data, mute=mute, **kwargs) + except exceptions.ObjectNotFound as e: + if re.match(r"Project .+ not found", e.message): + log.warning("Clearing project cache") + projects.Project.CACHE.clear() + raise + + def post(self, api: str, params: types.ApiParams = None, mute: tuple[HTTPStatus] = (), **kwargs: str) -> requests.Response: + """Performs an HTTP POST request for the object""" + try: + return super().post(api=api, params=params, mute=mute, **kwargs) + except exceptions.ObjectNotFound as e: + if re.match(r"Project .+ not found", e.message): + log.warning("Clearing project cache") + projects.Project.CACHE.clear() + raise + def new_code(self) -> str: """ :return: The branch new code period definition @@ -199,13 +206,7 @@ def new_code(self) -> str: if self._new_code is None and self.endpoint.is_sonarcloud(): self._new_code = settings.new_code_to_string({"inherited": True}) elif self._new_code is None: - try: - data = json.loads(self.get(api=Branch.API["get_new_code"], params=self.api_params(c.LIST)).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting new code period of {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - Branch.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.concerned_object.key, f"{str(self.concerned_object)} not found") - + data = json.loads(self.get(api=Branch.API["get_new_code"], params=self.api_params(c.LIST)).text) for b in data["newCodePeriods"]: new_code = settings.new_code_to_string(b) if b["branchKey"] == self.name: @@ -245,12 +246,9 @@ def set_keep_when_inactive(self, keep: bool) -> bool: :return: Whether the operation was successful """ log.info("Setting %s keep when inactive to %s", self, keep) - try: - self.post("project_branches/set_automatic_deletion_protection", params=self.api_params() | {"value": 
str(keep).lower()}) + ok = self.post("project_branches/set_automatic_deletion_protection", params=self.api_params() | {"value": str(keep).lower()}).ok + if ok: self._keep_when_inactive = keep - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting {str(self)} keep when inactive to {keep}", catch_all=True) - return False return True def set_as_main(self) -> bool: @@ -258,11 +256,7 @@ def set_as_main(self) -> bool: :return: Whether the operation was successful """ - try: - self.post("api/project_branches/set_main", params=self.api_params()) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting {str(self)} as main branch", catch_all=True) - return False + self.post("api/project_branches/set_main", params=self.api_params()) for b in self.concerned_object.branches().values(): b._is_main = b.name == self.name return True @@ -317,30 +311,21 @@ def rename(self, new_name: str) -> bool: log.debug("Skipping rename %s with same new name", str(self)) return False log.info("Renaming main branch of %s from '%s' to '%s'", str(self.concerned_object), self.name, new_name) - try: - self.post(Branch.API[c.RENAME], params={"project": self.concerned_object.key, "name": new_name}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"Renaming {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND, HTTPStatus.BAD_REQUEST)) - if isinstance(e, HTTPError): - if e.response.status_code == HTTPStatus.NOT_FOUND: - Branch.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.concerned_object.key, f"str{self.concerned_object} not found") - if e.response.status_code == HTTPStatus.BAD_REQUEST: - return False + self.post(Branch.API[c.RENAME], params={"project": self.concerned_object.key, "name": new_name}) Branch.CACHE.pop(self) self.name = new_name Branch.CACHE.put(self) return True - def get_findings(self) -> dict[str, object]: + def get_findings(self, filters: Optional[types.ApiParams] = None) -> dict[str, object]: 
"""Returns a branch list of findings :return: dict of Findings, with finding key as key :rtype: dict{key: Finding} """ - findings = self.get_issues() - findings.update(self.get_hotspots()) - return findings + if not filters: + return self.concerned_object.get_findings(branch=self.name) + return self.get_issues(filters) | self.get_hotspots(filters) def component_data(self) -> dict[str, str]: """Returns key data""" diff --git a/sonar/components.py b/sonar/components.py index 246570a0a..782d6b1f3 100644 --- a/sonar/components.py +++ b/sonar/components.py @@ -118,7 +118,7 @@ def get_issues(self, filters: types.ApiParams = None) -> dict[str, object]: """Returns list of issues for a component, optionally on branches or/and PRs""" from sonar.issues import search_all - filters = {k: list(set(v) if isinstance(v, (list, set, tuple)) else v) for k, v in (filters or {}).items() if v is not None} + filters = {k: list(set(v)) if isinstance(v, (list, set, tuple)) else v for k, v in (filters or {}).items() if v is not None} log.info("Searching issues for %s with filters %s", str(self), str(filters)) issue_list = search_all(endpoint=self.endpoint, params=self.api_params() | {"additionalFields": "comments"} | filters) self.nbr_issues = len(issue_list) diff --git a/sonar/devops.py b/sonar/devops.py index 224608c93..1db7e129f 100644 --- a/sonar/devops.py +++ b/sonar/devops.py @@ -22,11 +22,8 @@ from __future__ import annotations from typing import Optional, Union -from http import HTTPStatus import json -from requests import RequestException - import sonar.logging as log from sonar.util import types, cache from sonar import platform @@ -109,11 +106,10 @@ def create(cls, endpoint: platform.Platform, key: str, plt_type: str, url_or_wor elif plt_type == "bitbucketcloud": params.update({"clientSecret": _TO_BE_SET, "clientId": _TO_BE_SET, "workspace": url_or_workspace}) endpoint.post(_CREATE_API_BBCLOUD, params=params) - except (ConnectionError, RequestException) as e: - 
util.handle_error(e, f"creating devops platform {key}/{plt_type}/{url_or_workspace}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + except exceptions.SonarException as e: if endpoint.edition() in (c.CE, c.DE): log.warning("Can't set DevOps platform '%s', don't you have more that 1 of that type?", key) - raise exceptions.UnsupportedOperation(f"Can't set DevOps platform '{key}', don't you have more that 1 of that type?") + raise exceptions.UnsupportedOperation(e.message) from e o = DevopsPlatform(endpoint=endpoint, key=key, platform_type=plt_type) o.refresh() return o @@ -185,8 +181,7 @@ def update(self, **kwargs) -> bool: ok = self.post(f"alm_settings/update_{alm_type}", params=params).ok self.url = kwargs["url"] self._specific = {k: v for k, v in params.items() if k not in ("key", "url")} - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"updating devops platform {self.key}/{alm_type}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + except exceptions.SonarException: ok = False return ok diff --git a/sonar/findings.py b/sonar/findings.py index 784654d40..0372f5931 100644 --- a/sonar/findings.py +++ b/sonar/findings.py @@ -23,8 +23,7 @@ import concurrent.futures from datetime import datetime from typing import Optional -from http import HTTPStatus -from requests import RequestException +import re import Levenshtein import sonar.logging as log @@ -32,6 +31,7 @@ import sonar.platform as pf from sonar.util import types from sonar.util import constants as c, issue_defs as idefs +from sonar import exceptions import sonar.utilities as util from sonar import projects, rules @@ -182,7 +182,7 @@ def assign(self, assignee: Optional[str] = None) -> str: def language(self) -> str: """Returns the finding language""" - return rules.get_object(endpoint=self.endpoint, key=self.rule).language + return rules.Rule.get_object(endpoint=self.endpoint, key=self.rule).language def to_csv(self, without_time: bool = False) -> list[str]: """ @@ -458,9 +458,10 @@ 
def search_siblings( def do_transition(self, transition: str) -> bool: try: return self.post("issues/do_transition", {"issue": self.key, "transition": transition}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"applying transition {transition}", catch_http_statuses=(HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)) - return False + except exceptions.SonarException as e: + if re.match(r"Transition from state [A-Za-z]+ does not exist", e.message): + raise exceptions.UnsupportedOperation(e.message) from e + raise def get_branch_and_pr(self, data: types.ApiPayload) -> tuple[Optional[str], Optional[str]]: """ diff --git a/sonar/groups.py b/sonar/groups.py index b6e44a8b5..a747e5f91 100644 --- a/sonar/groups.py +++ b/sonar/groups.py @@ -26,9 +26,6 @@ from typing import Optional -from http import HTTPStatus -from requests import HTTPError, RequestException - import sonar.logging as log import sonar.platform as pf import sonar.sqobject as sq @@ -115,11 +112,7 @@ def create(cls, endpoint: pf.Platform, name: str, description: str = None) -> Gr :return: The group object """ log.debug("Creating group '%s'", name) - try: - data = json.loads(endpoint.post(Group.api_for(c.CREATE, endpoint), params={"name": name, "description": description}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating group '{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(name, util.sonar_error(e.response)) + data = json.loads(endpoint.post(Group.api_for(c.CREATE, endpoint), params={"name": name, "description": description}).text) o = cls.read(endpoint=endpoint, name=name) o.sq_json.update(data) return o @@ -169,10 +162,10 @@ def delete(self) -> bool: ok = self.post(api=Group.API_V1[c.DELETE], params=self.api_params(c.DELETE)).ok if ok: log.info("Removing from %s cache", str(self.__class__.__name__)) - self.__class__.CACHE.pop(self) - except (ConnectionError, RequestException) as e: - 
util.handle_error(e, f"deleting {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") + Group.CACHE.pop(self) + except exceptions.ObjectNotFound: + Group.CACHE.pop(self) + raise return ok def api_params(self, op: str) -> types.ApiParams: @@ -227,21 +220,11 @@ def add_user(self, user: object) -> bool: :return: Whether the operation succeeded """ log.info("Adding %s to %s", str(user), str(self)) - try: - if self.endpoint.version() >= c.GROUP_API_V2_INTRO_VERSION: - params = {"groupId": self.id, "userId": user.id} - else: - params = {"login": user.login, "name": self.name} - r = self.post(Group.api_for(ADD_USER, self.endpoint), params=params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, "adding user to group", catch_http_statuses=(HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)) - if isinstance(e, HTTPError): - code = e.response.status_code - if code == HTTPStatus.BAD_REQUEST: - raise exceptions.UnsupportedOperation(util.sonar_error(e.response)) - if code == HTTPStatus.NOT_FOUND: - raise exceptions.ObjectNotFound(user.login, util.sonar_error(e.response)) - return r.ok + if self.endpoint.version() >= c.GROUP_API_V2_INTRO_VERSION: + params = {"groupId": self.id, "userId": user.id} + else: + params = {"login": user.login, "name": self.name} + return self.post(Group.api_for(ADD_USER, self.endpoint), params=params).ok def remove_user(self, user: object) -> bool: """Removes a user from the group @@ -251,24 +234,14 @@ def remove_user(self, user: object) -> bool: :rtype: bool """ log.info("Removing %s from %s", str(user), str(self)) - try: - if self.endpoint.version() >= c.GROUP_API_V2_INTRO_VERSION: - for m in json.loads(self.get(MEMBERSHIP_API, params={"userId": user.id}).text)["groupMemberships"]: - if m["groupId"] == self.id: - return self.endpoint.delete(f"{Group.api_for(REMOVE_USER, self.endpoint)}/{m['id']}").ok - raise exceptions.ObjectNotFound(user.login, 
f"{str(self)} or user id '{user.id} not found") - else: - params = {"login": user.login, "name": self.name} - return self.post(Group.api_for(REMOVE_USER, self.endpoint), params=params).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, "removing user from group", catch_http_statuses=(HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)) - if isinstance(e, HTTPError): - code = e.response.status_code - if code == HTTPStatus.BAD_REQUEST: - raise exceptions.UnsupportedOperation(util.sonar_error(e.response)) - if code == HTTPStatus.NOT_FOUND: - raise exceptions.ObjectNotFound(user.login, util.sonar_error(e.response)) - return False + if self.endpoint.version() >= c.GROUP_API_V2_INTRO_VERSION: + for m in json.loads(self.get(MEMBERSHIP_API, params={"userId": user.id}).text)["groupMemberships"]: + if m["groupId"] == self.id: + return self.endpoint.delete(f"{Group.api_for(REMOVE_USER, self.endpoint)}/{m['id']}").ok + raise exceptions.ObjectNotFound(user.login, f"{str(self)} or user id '{user.id} not found") + else: + params = {"login": user.login, "name": self.name} + return self.post(Group.api_for(REMOVE_USER, self.endpoint), params=params).ok def audit(self, audit_settings: types.ConfigSettings = None) -> list[Problem]: """Audits a group and return list of problems found @@ -410,9 +383,8 @@ def get_object_from_id(endpoint: pf.Platform, id: str) -> Group: raise exceptions.UnsupportedOperation("Operation unsupported before SonarQube 10.4") if len(Group.CACHE) == 0: get_list(endpoint) - for o in Group.CACHE.values(): - if o.id == id: - return o + if gr := next((o for o in Group.CACHE.values() if o.id == id), None): + return gr raise exceptions.ObjectNotFound(id, message=f"Group '{id}' not found") diff --git a/sonar/hotspots.py b/sonar/hotspots.py index 344befb1d..fb168ad44 100644 --- a/sonar/hotspots.py +++ b/sonar/hotspots.py @@ -25,7 +25,6 @@ from datetime import datetime from typing import Optional from http import HTTPStatus -from requests import 
RequestException import requests.utils import sonar.logging as log @@ -36,6 +35,7 @@ from sonar import users from sonar import findings, rules, changelog +from sonar import exceptions PROJECT_FILTER = "project" PROJECT_FILTER_OLD = "projectKey" @@ -152,19 +152,18 @@ def refresh(self) -> bool: self.rule = d["rule"]["key"] self.assignee = d.get("assignee", None) return resp.ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, "refreshing hotspot", catch_all=True) + except exceptions.SonarException: return False def __mark_as(self, resolution: Optional[str], comment: Optional[str] = None, status: str = "REVIEWED") -> bool: try: params = util.remove_nones({"hotspot": self.key, "status": status, "resolution": resolution, "commemt": comment}) - r = self.post("hotspots/change_status", params=params) - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, f"marking hotspot as {status}/{resolution}", catch_all=True) + ok = self.post("hotspots/change_status", params=params).ok + self.refresh() + except exceptions.SonarException: return False - self.refresh() - return r.ok + else: + return ok def mark_as_safe(self) -> bool: """Marks a hotspot as safe @@ -215,8 +214,7 @@ def add_comment(self, comment: str) -> bool: """ try: return self.post("hotspots/add_comment", params={"hotspot": self.key, "comment": comment}).ok - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "adding comment to hotspot", catch_all=True) + except exceptions.SonarException: return False def assign(self, assignee: Optional[str], comment: Optional[str] = None) -> bool: @@ -231,13 +229,13 @@ def assign(self, assignee: Optional[str], comment: Optional[str] = None) -> bool log.debug("Unassigning %s", str(self)) else: log.debug("Assigning %s to '%s'", str(self), str(assignee)) - r = self.post("hotspots/assign", util.remove_nones({"hotspot": self.key, "assignee": assignee, "comment": comment})) - if r.ok: + ok = 
self.post("hotspots/assign", util.remove_nones({"hotspot": self.key, "assignee": assignee, "comment": comment})).ok + if ok: self.assignee = assignee - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "assigning/unassigning hotspot", catch_all=True) + except exceptions.SonarException: return False - return r.ok + else: + return ok def unassign(self, comment: Optional[str] = None) -> bool: """Unassigns a hotspot (and optionally comment) @@ -421,8 +419,7 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str, try: data = json.loads(endpoint.get(Hotspot.API[c.SEARCH], params=inline_filters, mute=(HTTPStatus.NOT_FOUND,)).text) nbr_hotspots = util.nbr_total_elements(data) - except (ConnectionError, RequestException) as e: - util.handle_error(e, "searching hotspots", catch_all=True) + except exceptions.SonarException: nbr_hotspots = 0 return {} nbr_pages = util.nbr_pages(data) @@ -500,7 +497,7 @@ def post_search_filter(hotspots_dict: dict[str, Hotspot], filters: types.ApiPara log.debug("%d hotspots remaining after filtering by createdBefore %s", len(filtered_findings), str(filters["createdBefore"])) if "languages" in filters: filtered_findings = { - k: v for k, v in filtered_findings.items() if rules.get_object(endpoint=v.endpoint, key=v.rule).language in filters["languages"] + k: v for k, v in filtered_findings.items() if rules.Rule.get_object(endpoint=v.endpoint, key=v.rule).language in filters["languages"] } log.debug("%d hotspots remaining after filtering by languages %s", len(filtered_findings), str(filters["languages"])) log.debug("%d hotspots remaining after post search filtering", len(filtered_findings)) diff --git a/sonar/issues.py b/sonar/issues.py index 4bbdb14f3..35ecf9d14 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -293,19 +293,13 @@ def add_comment(self, comment: str) -> bool: """ log.debug("Adding comment '%s' to %s", comment, str(self)) try: - r = self.post("issues/add_comment", 
{"issue": self.key, "text": comment}) - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "adding comment", catch_all=True) + return self.post("issues/add_comment", {"issue": self.key, "text": comment}).ok + except exceptions.SonarException: return False - return r.ok def __set_severity(self, **params) -> bool: - try: - log.debug("Changing severity of %s from '%s' to '%s'", str(self), self.severity, str(params)) - r = self.post("issues/set_severity", {"issue": self.key, **params}) - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "changing issue severity", catch_all=True) - return False + log.debug("Changing severity of %s from '%s' to '%s'", str(self), self.severity, str(params)) + r = self.post("issues/set_severity", {"issue": self.key, **params}) return r.ok def set_severity(self, severity: str) -> bool: @@ -347,13 +341,12 @@ def assign(self, assignee: Optional[str] = None) -> bool: try: params = util.remove_nones({"issue": self.key, "assignee": assignee}) log.debug("Assigning %s to '%s'", str(self), str(assignee)) - r = self.post("issues/assign", params) - if r.ok: + if ok := self.post("issues/assign", params).ok: self.assignee = assignee - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "assigning issue", catch_all=True) + except exceptions.SonarException: return False - return r.ok + else: + return ok def get_tags(self, **kwargs) -> list[str]: """Returns issues tags""" @@ -399,13 +392,12 @@ def set_type(self, new_type: str) -> bool: raise exceptions.UnsupportedOperation("Changing issue type is not supported in MQR mode") log.debug("Changing type of issue %s from %s to %s", self.key, self.type, new_type) try: - r = self.post("issues/set_type", {"issue": self.key, "type": new_type}) - if r.ok: + if ok := self.post("issues/set_type", {"issue": self.key, "type": new_type}).ok: self.type = new_type - except (ConnectionError, requests.RequestException) as e: - 
util.handle_error(e, "setting issue type", catch_all=True) + except exceptions.SonarException: return False - return r.ok + else: + return ok def is_wont_fix(self) -> bool: """ diff --git a/sonar/measures.py b/sonar/measures.py index 9f5e2ba0e..77433697e 100644 --- a/sonar/measures.py +++ b/sonar/measures.py @@ -23,8 +23,6 @@ from __future__ import annotations import json -from http import HTTPStatus -from requests import RequestException from sonar import metrics, exceptions, platform from sonar.util.types import ApiPayload, ApiParams, KeyList from sonar.util import cache, constants as c @@ -140,11 +138,7 @@ def get(concerned_object: object, metrics_list: KeyList, **kwargs) -> dict[str, params["metricKeys"] = util.list_to_csv(metrics_list) log.debug("Getting measures with %s", str(params)) - try: - data = json.loads(concerned_object.endpoint.get(Measure.API_READ, params={**kwargs, **params}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting measures {str(metrics_list)} of {str(concerned_object)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(concerned_object.key, f"{str(concerned_object)} not found") + data = json.loads(concerned_object.endpoint.get(Measure.API_READ, params={**kwargs, **params}).text) m_dict = dict.fromkeys(metrics_list, None) for m in data["component"]["measures"]: m_dict[m["metric"]] = Measure.load(data=m, concerned_object=concerned_object) @@ -169,15 +163,7 @@ def get_history(concerned_object: object, metrics_list: KeyList, **kwargs) -> li params["metrics"] = util.list_to_csv(metrics_list) log.debug("Getting measures history with %s", str(params)) - try: - data = json.loads(concerned_object.endpoint.get(Measure.API_HISTORY, params={**kwargs, **params}).text) - except (ConnectionError, RequestException) as e: - util.handle_error( - e, - f"getting measures {str(metrics_list)} history of {str(concerned_object)}", - catch_http_statuses=(HTTPStatus.NOT_FOUND,), - ) - raise 
exceptions.ObjectNotFound(concerned_object.key, f"{str(concerned_object)} not found") + data = json.loads(concerned_object.endpoint.get(Measure.API_HISTORY, params={**kwargs, **params}).text) res_list = [] for m in reversed(data["measures"]): res_list += [[dt["date"], m["metric"], dt["value"]] for dt in m["history"] if "value" in dt] diff --git a/sonar/metrics.py b/sonar/metrics.py index f77b06077..fc21d5cd2 100644 --- a/sonar/metrics.py +++ b/sonar/metrics.py @@ -105,8 +105,7 @@ def __init__(self, endpoint: pf.Platform, key: str, data: ApiPayload = None) -> @classmethod def get_object(cls, endpoint: pf.Platform, key: str) -> Metric: search(endpoint=endpoint) - o = Metric.CACHE.get(key, endpoint.local_url) - if not o: + if not (o := Metric.CACHE.get(key, endpoint.local_url)): raise exceptions.ObjectNotFound(key, f"Metric key '{key}' not found") return o @@ -156,26 +155,17 @@ def search(endpoint: pf.Platform, show_hidden_metrics: bool = False, use_cache: def is_a_rating(endpoint: pf.Platform, metric_key: str) -> bool: """Whether a metric is a rating""" - try: - return Metric.get_object(endpoint, metric_key).is_a_rating() - except exceptions.ObjectNotFound: - return False + return Metric.get_object(endpoint, metric_key).is_a_rating() def is_a_percent(endpoint: pf.Platform, metric_key: str) -> bool: """Whether a metric is a percent""" - try: - return Metric.get_object(endpoint, metric_key).is_a_percent() - except exceptions.ObjectNotFound: - return False + return Metric.get_object(endpoint, metric_key).is_a_percent() def is_an_effort(endpoint: pf.Platform, metric_key: str) -> bool: """Whether a metric is an effort""" - try: - return Metric.get_object(endpoint, metric_key).is_an_effort() - except exceptions.ObjectNotFound: - return False + Metric.get_object(endpoint, metric_key).is_an_effort() def count(endpoint: pf.Platform, use_cache: bool = True) -> int: diff --git a/sonar/organizations.py b/sonar/organizations.py index c35c05726..57627a486 100644 --- 
a/sonar/organizations.py +++ b/sonar/organizations.py @@ -25,9 +25,7 @@ from __future__ import annotations import json -from http import HTTPStatus from threading import Lock -from requests import RequestException import sonar.logging as log import sonar.platform as pf @@ -73,15 +71,9 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Organization: """ if not endpoint.is_sonarcloud(): raise exceptions.UnsupportedOperation(_NOT_SUPPORTED) - o = Organization.CACHE.get(key, endpoint.local_url) - if o: + if o := Organization.CACHE.get(key, endpoint.local_url): return o - try: - data = json.loads(endpoint.get(Organization.API[c.SEARCH], params={"organizations": key}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting organization {key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(key, f"Organization '{key}' not found") - + data = json.loads(endpoint.get(Organization.API[c.SEARCH], params={"organizations": key}).text) if len(data["organizations"]) == 0: raise exceptions.ObjectNotFound(key, f"Organization '{key}' not found") return cls.load(endpoint, data["organizations"][0]) @@ -192,8 +184,8 @@ def exists(endpoint: pf.Platform, org_key: str) -> bool: log.info("Verifying that organization '%s' exists", org_key) try: _ = Organization.get_object(endpoint=endpoint, key=org_key) - log.warning("Organization '%s' does not exist or user is not a member", org_key) except exceptions.ObjectNotFound: + log.warning("Organization '%s' does not exist or user is not a member", org_key) return False log.debug("Organization '%s' exists and user is a member", org_key) return True diff --git a/sonar/permissions/permission_templates.py b/sonar/permissions/permission_templates.py index fc2f60c05..627ce911a 100644 --- a/sonar/permissions/permission_templates.py +++ b/sonar/permissions/permission_templates.py @@ -24,11 +24,10 @@ import json import re -from requests import RequestException import sonar.logging as 
log from sonar.util import types, cache -from sonar import sqobject, utilities +from sonar import sqobject, utilities, exceptions from sonar.permissions import template_permissions import sonar.platform as pf from sonar.audit.rules import get_rule, RuleId @@ -136,7 +135,7 @@ def permissions(self) -> template_permissions.TemplatePermissions: self._permissions = template_permissions.TemplatePermissions(self) return self._permissions - def set_as_default(self, what_list: list[str]) -> None: + def set_as_default(self, what_list: list[str]) -> bool: """Sets a permission template as default for projects or apps or portfolios""" log.debug("Setting %s as default for %s", str(self), str(what_list)) ed = self.endpoint.edition() @@ -146,9 +145,10 @@ def set_as_default(self, what_list: list[str]) -> None: log.warning("Can't set permission template as default for %s on a %s edition", qual, ed) continue try: - self.post("permissions/set_default_template", params={"templateId": self.key, "qualifier": qual}) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"setting {str(self)} as default") + return self.post("permissions/set_default_template", params={"templateId": self.key, "qualifier": qual}).ok + except exceptions.SonarException: + return False + return False def set_pattern(self, pattern: str) -> PermissionTemplate: """Sets a permission template pattern""" diff --git a/sonar/permissions/permissions.py b/sonar/permissions/permissions.py index 4b5e4aa00..4ae1e9b84 100644 --- a/sonar/permissions/permissions.py +++ b/sonar/permissions/permissions.py @@ -25,10 +25,9 @@ import json from abc import ABC, abstractmethod -from requests import RequestException import sonar.logging as log -from sonar import utilities +from sonar import utilities, exceptions from sonar.util import types from sonar.audit.rules import get_rule, RuleId from sonar.audit.problem import Problem @@ -299,15 +298,14 @@ def _get_api(self, api: str, perm_type: str, ret_field: str, 
**extra_params) -> else: counter += 1 page, nbr_pages = page + 1, utilities.nbr_pages(data) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"getting permissions of {str(self)}", catch_all=True) + except exceptions.SonarException: page += 1 return perms def _post_api(self, api: str, set_field: str, perms_dict: types.JsonPermissions, **extra_params) -> bool: if perms_dict is None: return True - result = False + ok = True params = extra_params.copy() for u, perms in perms_dict.items(): params[set_field] = u @@ -315,11 +313,10 @@ def _post_api(self, api: str, set_field: str, perms_dict: types.JsonPermissions, for p in filtered_perms: params["permission"] = p try: - r = self.endpoint.post(api, params=params) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"setting permissions of {str(self)}", catch_all=True) - result = result and r.ok - return result + ok = self.endpoint.post(api, params=params).ok and ok + except exceptions.SonarException: + ok = False + return ok def simplify(perms_dict: dict[str, list[str]]) -> Optional[dict[str, str]]: diff --git a/sonar/permissions/project_permissions.py b/sonar/permissions/project_permissions.py index 2e4ec9d83..989c91627 100644 --- a/sonar/permissions/project_permissions.py +++ b/sonar/permissions/project_permissions.py @@ -72,6 +72,9 @@ def _set_perms( self.read() for p in permissions.PERMISSION_TYPES: to_remove = diff_func(self.permissions.get(p, {}), new_perms.get(p, {})) + if p == "users" and "admin" in to_remove: + # Don't remove admin permission to the admin user, this is not possible anyway + to_remove["admin"] = [v for v in to_remove["admin"] if v != "admin"] self._post_api(apis["remove"][p], field[p], to_remove, **kwargs) to_add = diff_func(new_perms.get(p, {}), self.permissions.get(p, {})) self._post_api(apis["add"][p], field[p], to_add, **kwargs) diff --git a/sonar/permissions/quality_permissions.py b/sonar/permissions/quality_permissions.py index 
4f5ceeabf..e3a2bb0e0 100644 --- a/sonar/permissions/quality_permissions.py +++ b/sonar/permissions/quality_permissions.py @@ -24,11 +24,10 @@ from typing import Optional import json -from requests import RequestException from sonar.util import types import sonar.logging as log -from sonar import utilities +from sonar import utilities, exceptions from sonar.audit.problem import Problem from sonar.permissions import permissions @@ -86,8 +85,7 @@ def _get_api(self, api: str, perm_type: tuple[str, ...], ret_field: str, **extra data = json.loads(resp.text) perms += [p[ret_field] for p in data[perm_type]] page, nbr_pages = page + 1, utilities.nbr_pages(data) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"getting permissions of {str(self)}", catch_all=True) + except exceptions.SonarException: page += 1 return perms diff --git a/sonar/platform.py b/sonar/platform.py index 7ad04d12a..364a86b5d 100644 --- a/sonar/platform.py +++ b/sonar/platform.py @@ -28,6 +28,7 @@ from http import HTTPStatus import sys import os +import re from typing import Optional import time import datetime @@ -264,9 +265,8 @@ def __run_request(self, request: callable, api: str, params: types.ApiParams = N headers["Authorization"] = f"Bearer {self.__token}" if with_org: params["organization"] = self.organization - req_type, url = "", "" + req_type, url = getattr(request, "__name__", repr(request)).upper(), "" if log.get_level() <= log.DEBUG: - req_type = getattr(request, "__name__", repr(request)).upper() url = self.__urlstring(api, params, kwargs.get("data", {})) log.debug("%s: %s", req_type, url) kwargs["headers"] = headers @@ -288,10 +288,26 @@ def __run_request(self, request: callable, api: str, params: types.ApiParams = N self.local_url = new_url r.raise_for_status() except HTTPError as e: - lvl = log.DEBUG if r.status_code in mute else log.ERROR + code = r.status_code + lvl = log.DEBUG if code in mute else log.ERROR log.log(lvl, "%s (%s request)", 
util.error_msg(e), req_type) - raise e - except (ConnectionError, RequestException) as e: + err_msg = util.sonar_error(e.response) + err_msg_lower = err_msg.lower() + key = next((params[k] for k in ("key", "project", "component", "componentKey") if k in params), "Unknown") + if any( + msg in err_msg_lower for msg in ("not found", "no quality gate has been found", "does not exist", "could not find") + ): # code == HTTPStatus.NOT_FOUND: + raise exceptions.ObjectNotFound(key, err_msg) from e + if any(msg in err_msg_lower for msg in ("already exists", "already been taken")): + raise exceptions.ObjectAlreadyExists(key, err_msg) from e + if re.match(r"(Value of parameter .+ must be one of|No enum constant)", err_msg): + raise exceptions.UnsupportedOperation(err_msg) from e + if any(msg in err_msg_lower for msg in ("insufficient privileges", "insufficient permissions")): + raise exceptions.SonarException(err_msg, errcodes.SONAR_API_AUTHORIZATION) from e + if "unknown url" in err_msg_lower: + raise exceptions.UnsupportedOperation(err_msg) from e + raise exceptions.SonarException(err_msg, errcodes.SONAR_API) from e + except ConnectionError as e: util.handle_error(e, "") return r diff --git a/sonar/portfolio_reference.py b/sonar/portfolio_reference.py index 55c58a6c6..4343ca0c3 100644 --- a/sonar/portfolio_reference.py +++ b/sonar/portfolio_reference.py @@ -24,14 +24,12 @@ """ from __future__ import annotations -from http import HTTPStatus -from requests import RequestException import sonar.logging as log import sonar.platform as pf from sonar.util import types, cache -from sonar import exceptions, utilities +from sonar import exceptions import sonar.sqobject as sq import sonar.util.constants as c @@ -70,14 +68,7 @@ def load(cls, reference: object, parent: object) -> PortfolioReference: @classmethod def create(cls, reference: object, parent: object, params: types.ApiParams = None) -> PortfolioReference: """Constructor, don't use - use class methods instead""" - - try: - 
parent.endpoint.post("views/add_portfolio", params={"portfolio": parent.key, "reference": reference.key}) - except (ConnectionError, RequestException) as e: - utilities.handle_error( - e, f"creating portfolio reference to {str(reference)} in {str(parent)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,) - ) - raise exceptions.ObjectAlreadyExists + parent.endpoint.post("views/add_portfolio", params={"portfolio": parent.key, "reference": reference.key}) return PortfolioReference(reference=reference, parent=parent) def __str__(self) -> str: diff --git a/sonar/portfolios.py b/sonar/portfolios.py index a5ece78ff..9057f5f42 100644 --- a/sonar/portfolios.py +++ b/sonar/portfolios.py @@ -30,7 +30,6 @@ import json from http import HTTPStatus from threading import Lock -from requests import HTTPError, RequestException import sonar.logging as log import sonar.platform as pf @@ -257,24 +256,18 @@ def sub_portfolios(self, full: bool = False) -> dict[str, Portfolio]: def add_reference_subportfolio(self, reference: Portfolio) -> object: ref = PortfolioReference.create(parent=self, reference=reference) - try: - if self.endpoint.version() >= (9, 3, 0): - self.post("views/add_portfolio", params={"portfolio": self.key, "reference": reference.key}, mute=(HTTPStatus.BAD_REQUEST,)) - else: - self.post("views/add_local_view", params={"key": self.key, "ref_key": reference.key}, mute=(HTTPStatus.BAD_REQUEST,)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding reference subportfolio to {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + if self.endpoint.version() >= (9, 3, 0): + self.post("views/add_portfolio", params={"portfolio": self.key, "reference": reference.key}, mute=(HTTPStatus.BAD_REQUEST,)) + else: + self.post("views/add_local_view", params={"key": self.key, "ref_key": reference.key}, mute=(HTTPStatus.BAD_REQUEST,)) self._sub_portfolios.update({reference.key: ref}) return ref def add_standard_subportfolio(self, key: str, name: str, 
**kwargs) -> Portfolio: """Adds a subportfolio""" subp = Portfolio.create(endpoint=self.endpoint, key=key, name=name, parent=self, **kwargs) - try: - if self.endpoint.version() < (9, 3, 0): - self.post("views/add_sub_view", params={"key": self.key, "name": name, "subKey": key}, mute=(HTTPStatus.BAD_REQUEST,)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding standard subportfolio to {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + if self.endpoint.version() < (9, 3, 0): + self.post("views/add_sub_view", params={"key": self.key, "name": name, "subKey": key}, mute=(HTTPStatus.BAD_REQUEST,)) self._sub_portfolios.update({subp.key: subp}) return subp @@ -412,13 +405,9 @@ def add_projects(self, projects: set[str]) -> Portfolio: try: self.post("views/add_project", params={"key": self.key, "project": key}, mute=(HTTPStatus.BAD_REQUEST,)) self._selection_mode[_SELECTION_MODE_MANUAL][key] = {c.DEFAULT_BRANCH} - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding projects to {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND, HTTPStatus.BAD_REQUEST)) - if e.response.status_code == HTTPStatus.BAD_REQUEST: - log.warning("%s: Project '%s' already in %s", util.error_msg(e), key, str(self)) - else: - Portfolio.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"Project '{key}' not found, can't be added to {str(self)}") + except exceptions.ObjectNotFound: + Portfolio.CACHE.pop(self) + raise return self def add_project_branches(self, project_key: str, branches: set[str]) -> Portfolio: @@ -431,16 +420,7 @@ def add_project_branches(self, project_key: str, branches: set[str]) -> Portfoli return self def add_project_branch(self, project_key: str, branch: str) -> bool: - try: - r = self.post("views/add_project_branch", params={"key": self.key, "project": project_key, "branch": branch}) - except HTTPError as e: - if e.response.status_code == HTTPStatus.NOT_FOUND: - Portfolio.CACHE.pop(self) - raise 
exceptions.ObjectNotFound(self.key, f"Project '{project_key}' or branch '{branch}' not found, can't be added to {str(self)}") - if e.response.status_code == HTTPStatus.BAD_REQUEST: - log.warning("%s: Project '%s' branch '%s', already in %s", util.error_msg(e), project_key, branch, str(self)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding projects to {str(self)}") + r = self.post("views/add_project_branch", params={"key": self.key, "project": project_key, "branch": branch}) if project_key in self._selection_mode[_SELECTION_MODE_MANUAL]: self._selection_mode[_SELECTION_MODE_MANUAL][project_key].discard(c.DEFAULT_BRANCH) self._selection_mode[_SELECTION_MODE_MANUAL][project_key].add(branch) @@ -529,23 +509,20 @@ def add_application(self, app_key: str) -> bool: def add_application_branch(self, app_key: str, branch: str = c.DEFAULT_BRANCH) -> bool: app = applications.Application.get_object(self.endpoint, app_key) - try: - if branch == c.DEFAULT_BRANCH: - log.info("%s: Adding %s default branch", str(self), str(app)) - self.post("views/add_application", params={"portfolio": self.key, "application": app_key}, mute=(HTTPStatus.BAD_REQUEST,)) - else: - app_branch = app_branches.ApplicationBranch.get_object(app=app, branch_name=branch) - log.info("%s: Adding %s", str(self), str(app_branch)) - params = {"key": self.key, "application": app_key, "branch": branch} - self.post("views/add_application_branch", params=params, mute=(HTTPStatus.BAD_REQUEST,)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding app branch to {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + if branch == c.DEFAULT_BRANCH: + log.info("%s: Adding %s default branch", str(self), str(app)) + self.post("views/add_application", params={"portfolio": self.key, "application": app_key}, mute=(HTTPStatus.BAD_REQUEST,)) + else: + app_branch = app_branches.ApplicationBranch.get_object(app=app, branch_name=branch) + log.info("%s: Adding %s", 
str(self), str(app_branch)) + params = {"key": self.key, "application": app_key, "branch": branch} + self.post("views/add_application_branch", params=params, mute=(HTTPStatus.BAD_REQUEST,)) if app_key not in self._applications: self._applications[app_key] = [] self._applications[app_key].append(branch) return True - def add_subportfolio(self, key: str, name: str = None, by_ref: bool = False) -> object: + def add_subportfolio(self, key: str, name: str = None, by_ref: bool = False) -> Portfolio: """Adds a subportfolio to a portfolio, defined by key, name and by reference option""" log.info("Adding sub-portfolios to %s", str(self)) @@ -599,8 +576,7 @@ def get_project_list(self) -> list[str]: data = json.loads(self.get("api/measures/component_tree", params=params).text) nbr_projects = util.nbr_total_elements(data) proj_key_list += [comp["refKey"] for comp in data["components"]] - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting projects list of {str(self)}", catch_all=True) + except exceptions.SonarException: break nbr_pages = util.nbr_pages(data) log.debug("Number of projects: %d - Page: %d/%d", nbr_projects, page, nbr_pages) @@ -644,7 +620,12 @@ def update(self, data: dict[str, str], recurse: bool) -> None: if subp_data.get("byReference", False): o_subp = Portfolio.get_object(self.endpoint, key) if o_subp.key not in key_list: - self.add_subportfolio(o_subp.key, name=o_subp.name, by_ref=True) + try: + self.add_subportfolio(o_subp.key, name=o_subp.name, by_ref=True) + except exceptions.SonarException as e: + # If the exception is that the portfolio already references, just pass + if "already references" not in e.message: + raise else: try: o_subp = Portfolio.get_object(self.endpoint, key) @@ -716,12 +697,7 @@ def exists(endpoint: pf.Platform, key: str) -> bool: def delete(endpoint: pf.Platform, key: str) -> bool: """Deletes a portfolio by its key""" - try: - p = Portfolio.get_object(endpoint, key) - p.delete() - return True - except 
exceptions.ObjectNotFound: - return False + return Portfolio.get_object(endpoint, key).delete() def import_config(endpoint: pf.Platform, config_data: types.ObjectJsonRepr, key_list: types.KeyList = None) -> bool: @@ -757,7 +733,7 @@ def import_config(endpoint: pf.Platform, config_data: types.ObjectJsonRepr, key_ try: o = Portfolio.get_object(endpoint, key) o.update(data=data, recurse=True) - except exceptions.ObjectNotFound as e: + except exceptions.SonarException as e: log.error(e.message) return True @@ -800,8 +776,7 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, **kwarg exported_portfolios[k] = exp else: log.debug("Skipping export of %s, it's a standard sub-portfolio", str(p)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"exporting {str(p)}, export will be empty for this portfolio", catch_all=True) + except exceptions.SonarException: exported_portfolios[k] = {} i += 1 if i % 10 == 0 or i == nb_portfolios: diff --git a/sonar/projects.py b/sonar/projects.py index 7a1ae1a13..65c9418dc 100644 --- a/sonar/projects.py +++ b/sonar/projects.py @@ -35,7 +35,6 @@ from http import HTTPStatus from threading import Lock from requests import HTTPError, RequestException -import Levenshtein import sonar.logging as log import sonar.platform as pf @@ -182,16 +181,10 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Project: :param str key: Project key to search :raises ObjectNotFound: if project key not found :return: The Project - :rtype: Project """ - o = Project.CACHE.get(key, endpoint.local_url) - if o: + if o := Project.CACHE.get(key, endpoint.local_url): return o - try: - data = json.loads(endpoint.get(Project.API[c.READ], params={"component": key}).text) - except RequestException as e: - util.handle_error(e, f"Getting project {key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(key, f"Project key '{key}' not found") + data = json.loads(endpoint.get(Project.API[c.READ], 
params={"component": key}).text) return cls.load(endpoint, data["component"]) @classmethod @@ -205,8 +198,7 @@ def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> Project: :rtype: Project """ key = data["key"] - o = Project.CACHE.get(key, endpoint.local_url) - if not o: + if not (o := Project.CACHE.get(key, endpoint.local_url)): o = cls(endpoint, key) o.reload(data) return o @@ -221,11 +213,7 @@ def create(cls, endpoint: pf.Platform, key: str, name: str) -> Project: :return: The Project :rtype: Project """ - try: - endpoint.post(Project.API[c.CREATE], params={"project": key, "name": name}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating project '{key}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(key, e.response.text) + endpoint.post(Project.API[c.CREATE], params={"project": key, "name": name}) o = cls(endpoint, key) o.name = name return o @@ -246,14 +234,12 @@ def refresh(self) -> Project: :raises ObjectNotFound: if project key not found :return: self - :rtype: Project """ try: data = json.loads(self.get(Project.api_for(c.READ, self.endpoint), params=self.api_params(c.READ)).text) - except RequestException as e: - util.handle_error(e, f"searching project {self.key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: Project.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") + raise return self.reload(data["component"]) def reload(self, data: types.ApiPayload) -> Project: @@ -264,10 +250,7 @@ def reload(self, data: types.ApiPayload) -> Project: :rtype: Project """ """Loads a project object with contents of an api/projects/search call""" - if self.sq_json is None: - self.sq_json = data - else: - self.sq_json.update(data) + self.sq_json = (self.sq_json or {}) | data self.name = data["name"] self._visibility = data["visibility"] if "lastAnalysisDate" in data: @@ -376,7 +359,7 @@ def delete(self) -> bool: :raises 
ObjectNotFound: If object to delete was not found in SonarQube :raises request.HTTPError: In all other cases of HTTP Errors - :return: Nothing + :return: Whether the operation succeeded """ loc = int(self.get_measure("ncloc", fallback="0")) log.info("Deleting %s, name '%s' with %d LoCs", str(self), self.name, loc) @@ -397,8 +380,7 @@ def binding(self) -> Optional[dict[str, str]]: try: resp = self.get("alm_settings/get_binding", params={"project": self.key}, mute=(HTTPStatus.NOT_FOUND,)) self._binding = {"has_binding": True, "binding": json.loads(resp.text)} - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting binding of {str(self)}", catch_http_errors=True, log_level=log.DEBUG) + except exceptions.SonarException: # Hack: 8.9 returns 404, 9.x returns 400 self._binding = {"has_binding": False} log.debug("%s binding = %s", str(self), str(self._binding.get("binding", None))) @@ -598,8 +580,8 @@ def ci(self) -> str: data = json.loads(self.get("project_analyses/search", params={"project": self.key, "ps": 1}).text)["analyses"] if len(data) > 0: self._ci, self._revision = data[0].get("detectedCI", "unknown"), data[0].get("revision", "unknown") - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting CI tool of {str(self)}", catch_all=True) + except exceptions.SonarException: + pass except KeyError: log.warning("KeyError, can't retrieve CI tool and revision") return self._ci @@ -682,10 +664,9 @@ def audit(self, audit_settings: types.ConfigSettings) -> list[Problem]: problems += self.__audit_branches(audit_settings) problems += self.__audit_pull_requests(audit_settings) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"auditing {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: Project.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, str(e)) + raise return problems @@ -703,10 +684,13 @@ def export_zip(self, asynchronous: bool = False, 
timeout: int = 180) -> tuple[st ) try: resp = self.post("project_dump/export", params={"key": self.key}) + except exceptions.ObjectNotFound as e: + Project.CACHE.pop(self) + return f"FAILED/{e.message}", None + except exceptions.SonarException as e: + return f"FAILED/{e.message}", None except RequestException as e: util.handle_error(e, f"exporting zip of {str(self)}", catch_all=True) - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.NOT_FOUND: - raise exceptions.ObjectNotFound(self.key, f"Project key '{self.key}' not found") return f"FAILED/{util.http_error_string(e.response.status_code)}", None except ConnectionError as e: return str(e), None @@ -736,13 +720,13 @@ def import_zip(self, asynchronous: bool = False, timeout: int = 180) -> str: raise exceptions.UnsupportedOperation("Project import is only available with Enterprise and Datacenter Edition") try: resp = self.post("project_dump/import", params={"key": self.key}) - except RequestException as e: - if "Dump file does not exist" in util.sonar_error(e.response): + except exceptions.ObjectNotFound as e: + Project.CACHE.pop(self) + return f"FAILED/{e.message}" + except exceptions.SonarException as e: + if "Dump file does not exist" in e.message: return f"FAILED/{tasks.ZIP_MISSING}" - util.handle_error(e, f"importing zip of {str(self)} {mode}", catch_all=True) - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.NOT_FOUND: - raise exceptions.ObjectNotFound(self.key, f"Project key '{self.key}' not found") - return f"FAILED/{util.http_error_string(e.response.status_code)}" + return f"FAILED/{e.message}" except ConnectionError as e: return f"FAILED/{str(e)}" @@ -771,7 +755,7 @@ def get_branches_and_prs(self, filters: dict[str, str]) -> Optional[dict[str, ob else: try: objects = {b: branches.Branch.get_object(concerned_object=self, branch_name=b) for b in br} - except (exceptions.ObjectNotFound, exceptions.UnsupportedOperation) as e: + except exceptions.SonarException as e: 
log.error(e.message) if pr: if "*" in pr: @@ -779,17 +763,15 @@ def get_branches_and_prs(self, filters: dict[str, str]) -> Optional[dict[str, ob else: try: objects.update({p: pull_requests.get_object(project=self, pull_request_key=p) for p in pr}) - except exceptions.ObjectNotFound as e: + except exceptions.SonarException as e: log.error(e.message) return objects def get_findings(self, branch: Optional[str] = None, pr: Optional[str] = None) -> dict[str, object]: """Returns a project list of findings (issues and hotspots) - :param branch: branch name to consider, if any - :type branch: str, optional - :param pr: PR key to consider, if any - :type pr: str, optional + :param str branch: optional branch name to consider, if any + :param str pr: optional PR key to consider, if any :return: JSON of all findings, with finding key as key :rtype: dict{key: Finding} """ @@ -800,17 +782,9 @@ def get_findings(self, branch: Optional[str] = None, pr: Optional[str] = None) - return {} log.info("Exporting findings for %s", str(self)) findings_list = {} - params = {"project": self.key} - if branch is not None: - params["branch"] = branch - elif pr is not None: - params["pullRequest"] = pr + params = util.remove_nones({"project": self.key, "branch": branch, "pullRequest": pr}) - try: - data = json.loads(self.get("projects/export_findings", params=params).text)["export_findings"] - except (ConnectionError, RequestException) as e: - util.handle_error(e, "getting project findings", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - return {} + data = json.loads(self.get("projects/export_findings", params=params).text)["export_findings"] findings_conflicts = {"SECURITY_HOTSPOT": 0, "BUG": 0, "CODE_SMELL": 0, "VULNERABILITY": 0} nbr_findings = {"SECURITY_HOTSPOT": 0, "BUG": 0, "CODE_SMELL": 0, "VULNERABILITY": 0} for i in data: @@ -825,7 +799,8 @@ def get_findings(self, branch: Optional[str] = None, pr: Optional[str] = None) - i["pullRequest"] = pr nbr_findings[i["type"]] += 1 if i["type"] 
== "SECURITY_HOTSPOT": - findings_list[key] = hotspots.get_object(endpoint=self.endpoint, key=key, data=i, from_export=True) + if i.get("status", "") != "CLOSED": + findings_list[key] = hotspots.get_object(endpoint=self.endpoint, key=key, data=i, from_export=True) else: findings_list[key] = issues.get_object(endpoint=self.endpoint, key=key, data=i, from_export=True) for t in ("SECURITY_HOTSPOT", "BUG", "CODE_SMELL", "VULNERABILITY"): @@ -842,9 +817,8 @@ def get_hotspots(self, filters: Optional[dict[str, str]] = None) -> dict[str, ob if branches_or_prs is None: return super().get_hotspots(filters) findings_list = {} - for comp in branches_or_prs.values(): - if comp: - findings_list = {**findings_list, **comp.get_hotspots()} + for component in [comp for comp in branches_or_prs.values() if comp]: + findings_list |= component.get_hotspots() return findings_list def get_issues(self, filters: Optional[dict[str, str]] = None) -> dict[str, object]: @@ -852,9 +826,8 @@ def get_issues(self, filters: Optional[dict[str, str]] = None) -> dict[str, obje if branches_or_prs is None: return super().get_issues(filters) findings_list = {} - for comp in branches_or_prs.values(): - if comp: - findings_list = {**findings_list, **comp.get_issues()} + for component in [comp for comp in branches_or_prs.values() if comp]: + findings_list |= component.get_issues() return findings_list def count_third_party_issues(self, filters: Optional[dict[str, str]] = None) -> dict[str, int]: @@ -865,9 +838,7 @@ def count_third_party_issues(self, filters: Optional[dict[str, str]] = None) -> return super().count_third_party_issues(filters) log.debug("Getting 3rd party issues on branches/PR") issue_counts = {} - for comp in branches_or_prs.values(): - if not comp: - continue + for comp in [co for co in branches_or_prs.values() if co]: log.debug("Getting 3rd party issues for %s", str(comp)) for k, total in comp.count_third_party_issues(filters).items(): if k not in issue_counts: @@ -953,12 +924,8 @@ def 
quality_gate(self) -> Optional[tuple[str, bool]]: :return: name of quality gate and whether it's the default :rtype: tuple(name, is_default) """ - try: - data = json.loads(self.get(api="qualitygates/get_by_project", params={"project": self.key}).text) - return data["qualityGate"]["name"], data["qualityGate"]["default"] - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting quality gate of {str(self)}", catch_http_statuses=(HTTPStatus.FORBIDDEN,)) - return "Error - Insufficient Permissions", False + data = json.loads(self.get(api="qualitygates/get_by_project", params={"project": self.key}).text) + return data["qualityGate"]["name"], data["qualityGate"]["default"] def webhooks(self) -> dict[str, webhooks.WebHook]: """ @@ -966,11 +933,7 @@ def webhooks(self) -> dict[str, webhooks.WebHook]: :rtype: dict{key: WebHook} """ log.debug("Getting %s webhooks", str(self)) - try: - return webhooks.get_list(endpoint=self.endpoint, project_key=self.key) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting webhooks of {str(self)}", catch_http_statuses=(HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) - return None + return webhooks.get_list(endpoint=self.endpoint, project_key=self.key) def links(self) -> Optional[list[dict[str, str]]]: """ @@ -979,8 +942,7 @@ def links(self) -> Optional[list[dict[str, str]]]: """ try: data = json.loads(self.get(api="project_links/search", params={"projectKey": self.key}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting links of {str(self)}", catch_http_statuses=(HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) + except exceptions.SonarException: return None link_list = None for link in data["links"]: @@ -1066,8 +1028,7 @@ def export(self, export_settings: types.ConfigSettings, settings_list: dict[str, try: hooks = webhooks.export(self.endpoint, self.key) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting webhooks of 
{str(self)}", catch_http_statuses=(HTTPStatus.FORBIDDEN,)) + except exceptions.SonarException: hooks = None if hooks is not None: json_data["webhooks"] = hooks @@ -1123,12 +1084,7 @@ def set_permissions(self, desired_permissions: types.ObjectJsonRepr) -> bool: :type desired_permissions: dict :return: Nothing """ - try: - self.permissions().set(desired_permissions) - return True - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting permissions of {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - return False + self.permissions().set(desired_permissions) def set_links(self, desired_links: types.ObjectJsonRepr) -> bool: """Sets project links @@ -1139,15 +1095,11 @@ def set_links(self, desired_links: types.ObjectJsonRepr) -> bool: """ params = {"projectKey": self.key} ok = True - try: - for link in desired_links.get("links", {}): - if link.get("type", "") != "custom": - continue - params.update(link) - ok = ok and self.post("project_links/create", params=params).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting links of {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)) - return False + for link in desired_links.get("links", {}): + if link.get("type", "") != "custom": + continue + params.update(link) + ok = ok and self.post("project_links/create", params=params).ok return ok def set_quality_gate(self, quality_gate: str) -> bool: @@ -1158,17 +1110,8 @@ def set_quality_gate(self, quality_gate: str) -> bool: """ if quality_gate is None: return False - try: - _ = qualitygates.QualityGate.get_object(self.endpoint, quality_gate) - except exceptions.ObjectNotFound: - log.warning("Quality gate '%s' not found, can't set it for %s", quality_gate, str(self)) - return False - log.debug("Setting quality gate '%s' for %s", quality_gate, str(self)) - try: - return self.post("qualitygates/select", params={"projectKey": self.key, "gateName": quality_gate}).ok - except 
(ConnectionError, RequestException) as e: - util.handle_error(e, f"setting permissions of {str(self)}", catch_all=True) - return False + _ = qualitygates.QualityGate.get_object(self.endpoint, quality_gate) + return self.post("qualitygates/select", params={"projectKey": self.key, "gateName": quality_gate}).ok def set_contains_ai_code(self, contains_ai_code: bool) -> bool: """Sets whether a project contains AI code @@ -1178,14 +1121,10 @@ def set_contains_ai_code(self, contains_ai_code: bool) -> bool: """ if self.endpoint.version() < (10, 7, 0) or self.endpoint.edition() == c.CE: return False - try: - api = "projects/set_contains_ai_code" - if self.endpoint.version() == (10, 7, 0): - api = "projects/set_ai_code_assurance" - return self.post(api, params={"project": self.key, "contains_ai_code": str(contains_ai_code).lower()}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting contains AI code of {str(self)}", catch_all=True) - return False + api = "projects/set_contains_ai_code" + if self.endpoint.version() == (10, 7, 0): + api = "projects/set_ai_code_assurance" + return self.post(api, params={"project": self.key, "contains_ai_code": str(contains_ai_code).lower()}).ok def set_quality_profile(self, language: str, quality_profile: str) -> bool: """Sets project quality profile for a given language @@ -1198,15 +1137,7 @@ def set_quality_profile(self, language: str, quality_profile: str) -> bool: log.warning("Quality profile '%s' in language '%s' does not exist, can't set it for %s", quality_profile, language, str(self)) return False log.debug("Setting quality profile '%s' of language '%s' for %s", quality_profile, language, str(self)) - try: - return self.post("qualityprofiles/add_project", params={"project": self.key, "qualityProfile": quality_profile, "language": language}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting quality profile of {str(self)}", catch_all=True) - errcode, msg = 
util.http_error_and_code(e) - if errcode == errcodes.OBJECT_NOT_FOUND: - Project.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, msg) - return False + return self.post("qualityprofiles/add_project", params={"project": self.key, "qualityProfile": quality_profile, "language": language}).ok def rename_main_branch(self, main_branch_name: str) -> bool: """Renames the project main branch @@ -1391,11 +1322,8 @@ def update(self, config: types.ObjectJsonRepr) -> None: except StopIteration: log.warning("No main branch defined in %s configuration", self) for branch_name, branch_data in branch_config.items(): - try: - branch = branches.Branch.get_object(self, branch_name) - branch.import_config(branch_data) - except exceptions.ObjectNotFound: - log.warning("Branch %s does not exist in %s, skipping update", branch_name, str(self)) + branch = branches.Branch.get_object(self, branch_name) + branch.import_config(branch_data) if "binding" in config: try: self.set_devops_binding(config["binding"]) @@ -1461,28 +1389,19 @@ def get_list(endpoint: pf.Platform, key_list: types.KeyList = None, threads: int def get_matching_list(endpoint: pf.Platform, pattern: str, threads: int = 8) -> dict[str, Project]: - """ + """Returns the list of projects whose keys are matching the pattern + :param Platform endpoint: Reference to the SonarQube platform :param str pattern: Regular expression to match project keys :return: the list of all projects matching the pattern - :rtype: dict{key: Project} """ - if not pattern or pattern == ".*": - return get_list(endpoint, threads=threads) + pattern = pattern or ".+" log.info("Listing projects matching regexp '%s'", pattern) matches = {k: v for k, v in get_list(endpoint, threads=threads).items() if re.match(rf"^{pattern}$", k)} log.info("%d project key matching regexp '%s'", len(matches), pattern) return matches -def __similar_keys(key1: str, key2: str, max_distance: int = 5) -> bool: - """Returns whether 2 project keys are similar""" - if key1 == 
key2: - return False - max_distance = min(len(key1) // 2, len(key2) // 2, max_distance) - return len(key2) >= 7 and (re.match(key2, key1)) or Levenshtein.distance(key1, key2, score_cutoff=6) <= max_distance - - def __audit_duplicates(projects_list: dict[str, Project], audit_settings: types.ConfigSettings) -> list[Problem]: """Audits for suspected duplicate projects""" if audit_settings.get(c.AUDIT_MODE_PARAM, "") == "housekeeper": @@ -1496,7 +1415,7 @@ def __audit_duplicates(projects_list: dict[str, Project], audit_settings: types. for key1, p in projects_list.items(): for key2 in projects_list: pair = " ".join(sorted([key1, key2])) - if __similar_keys(key1, key2, audit_settings.get("audit.projects.duplicates.maxDifferences", 4)) and pair not in pair_set: + if util.similar_strings(key1, key2, audit_settings.get("audit.projects.duplicates.maxDifferences", 4)) and pair not in pair_set: duplicates.append(Problem(get_rule(RuleId.PROJ_DUPLICATE), p, str(p), key2)) pair_set.add(pair) return duplicates @@ -1525,7 +1444,7 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings, **kwargs) :param Platform endpoint: reference to the SonarQube platform :param ConfigSettings audit_settings: Configuration of audit - :returns: list of problems found + :return: list of problems found """ if not audit_settings.get("audit.projects", True): log.info("Auditing projects is disabled, audit skipped...") @@ -1546,7 +1465,7 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings, **kwargs) try: problems += (proj_pbs := future.result(timeout=60)) write_q and write_q.put(proj_pbs) - except (TimeoutError, RequestException) as e: + except (TimeoutError, RequestException, exceptions.SonarException) as e: log.error(f"Exception {str(e)} when auditing {str(futures_map[future])}.") current += 1 lvl = log.INFO if current % 10 == 0 or total - current < 10 else log.DEBUG @@ -1564,7 +1483,7 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, 
**kwarg :param Platform endpoint: reference to the SonarQube platform :param ConfigSettings export_settings: Export parameters - :returns: list of projects settings + :return: list of projects settings """ write_q = kwargs.get("write_q", None) @@ -1585,7 +1504,7 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, **kwarg exp_json = future.result(timeout=60) write_q and write_q.put(exp_json) results[futures_map[future].key] = exp_json - except (TimeoutError, RequestException) as e: + except (TimeoutError, RequestException, exceptions.SonarException) as e: log.error(f"Exception {str(e)} when exporting {str(futures_map[future])}.") current += 1 lvl = log.INFO if current % 10 == 0 or total - current < 10 else log.DEBUG @@ -1595,12 +1514,12 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, **kwarg return dict(sorted(results.items())) -def exists(key: str, endpoint: pf.Platform) -> bool: - """ - :param str key: project key to check +def exists(endpoint: pf.Platform, key: str) -> bool: + """Returns whether a project exists + :param Platform endpoint: reference to the SonarQube platform - :returns: whether the project exists - :rtype: bool + :param str key: project key to check + :return: whether the project exists """ try: Project.get_object(endpoint, key) @@ -1721,7 +1640,8 @@ def export_zips( return results -def import_zip(endpoint: pf.Platform, project_key: str, import_timeout: int = 30) -> tuple[str, str]: +def import_zip(endpoint: pf.Platform, project_key: str, import_timeout: int = 30) -> tuple[Project, str]: + """Imports a project zip file""" try: o_proj = Project.create(key=project_key, endpoint=endpoint, name=project_key) except exceptions.ObjectAlreadyExists: @@ -1766,15 +1686,18 @@ def import_zips(endpoint: pf.Platform, project_list: list[str], threads: int = 2 status = f"EXCEPTION {e}" statuses_count[status] = statuses_count[status] + 1 if status in statuses_count else 1 if o_proj is None: - o_proj = 
futures_map[future] - statuses[o_proj.key] = {} + proj_key = futures_map[future] + statuses[proj_key] = {"importStatus": status} else: - statuses[o_proj.key] = {"importDate": datetime.now().strftime("%Y-%m-%d %H:%M:%S")} - statuses[o_proj.key]["importProjectUrl"] = o_proj.url() - statuses[o_proj.key]["importStatus"] = status + proj_key = o_proj.key + statuses[proj_key] = { + "importDate": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "importProjectUrl": o_proj.url(), + "importStatus": status, + } i += 1 - log.info("%d/%d imports (%d%%) - Latest: %s - %s", i, nb_projects, int(i * 100 / nb_projects), o_proj.key, status) + log.info("%d/%d imports (%d%%) - Latest: %s - %s", i, nb_projects, int(i * 100 / nb_projects), proj_key, status) log.info("%s", ", ".join([f"{k}:{v}" for k, v in statuses_count.items()])) return statuses diff --git a/sonar/pull_requests.py b/sonar/pull_requests.py index 28a3afc36..cea448bdf 100644 --- a/sonar/pull_requests.py +++ b/sonar/pull_requests.py @@ -106,6 +106,16 @@ def api_params(self, op: Optional[str] = None) -> types.ApiParams: ops = {c.READ: {"project": self.concerned_object.key, "pullRequest": self.key}} return ops[op] if op and op in ops else ops[c.READ] + def get_findings(self, filters: Optional[types.ApiParams] = None) -> dict[str, object]: + """Returns a PR list of findings + + :return: dict of Findings, with finding key as key + :rtype: dict{key: Finding} + """ + if not filters: + return self.concerned_object.get_findings(pr=self.key) + return self.get_issues(filters) | self.get_hotspots(filters) + def get_object(pull_request_key: str, project: object, data: types.ApiPayload = None) -> Optional[PullRequest]: """Returns a PR object from a PR key and a project""" diff --git a/sonar/qualitygates.py b/sonar/qualitygates.py index a6af2c329..65cd1506f 100644 --- a/sonar/qualitygates.py +++ b/sonar/qualitygates.py @@ -26,9 +26,7 @@ from __future__ import annotations from typing import Union -from http import HTTPStatus import json 
-from requests import RequestException import sonar.logging as log import sonar.sqobject as sq @@ -166,11 +164,7 @@ def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> QualityGate: @classmethod def create(cls, endpoint: pf.Platform, name: str) -> Union[QualityGate, None]: """Creates an empty quality gate""" - try: - endpoint.post(QualityGate.API[c.CREATE], params={"name": name}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating quality gate '{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(name, e.response.text) + endpoint.post(QualityGate.API[c.CREATE], params={"name": name}) return cls.get_object(endpoint, name) def __str__(self) -> str: @@ -195,24 +189,19 @@ def projects(self) -> dict[str, projects.Project]: """ :raises ObjectNotFound: If Quality gate not found :return: The list of projects using this quality gate - :rtype: dict {: } """ if self._projects is not None: return self._projects - if self.endpoint.is_sonarcloud(): - params = {"gateId": self.key, "ps": 500} - else: - params = {"gateName": self.name, "ps": 500} + params = {"ps": 500} | {"gateId": self.key} if self.endpoint.is_sonarcloud() else {"gateName": self.name} page, nb_pages = 1, 1 self._projects = {} while page <= nb_pages: params["p"] = page try: resp = self.get(QualityGate.API["get_projects"], params=params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting projects of {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: QualityGate.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.name, f"{str(self)} not found") + raise data = json.loads(resp.text) for prj in data["results"]: key = prj["key"] if "key" in prj else prj["id"] @@ -272,8 +261,7 @@ def set_conditions(self, conditions_list: list[str]) -> bool: (params["metric"], params["op"], params["error"]) = _decode_condition(cond) try: ok = ok and 
self.post("qualitygates/create_condition", params=params).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding condition '{cond}' to {str(self)}", catch_all=True) + except exceptions.SonarException: ok = False self._conditions = None self.conditions() @@ -308,14 +296,14 @@ def set_as_default(self) -> bool: """ params = {"id": self.key} if self.endpoint.is_sonarcloud() else {"name": self.name} try: - r = self.post("qualitygates/set_as_default", params=params) + ok = self.post("qualitygates/set_as_default", params=params).ok # Turn off default for all other quality gates except the current one for qg in get_list(self.endpoint).values(): qg.is_default = qg.name == self.name - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting {str(self)} as default quality gate") + except exceptions.SonarException: return False - return r.ok + else: + return ok def update(self, **data) -> bool: """Updates a quality gate diff --git a/sonar/qualityprofiles.py b/sonar/qualityprofiles.py index d20cab927..1ec490cef 100644 --- a/sonar/qualityprofiles.py +++ b/sonar/qualityprofiles.py @@ -24,11 +24,9 @@ from typing import Optional import json from datetime import datetime -from http import HTTPStatus import concurrent.futures from threading import Lock -from requests import RequestException import requests.utils import sonar.logging as log @@ -132,11 +130,7 @@ def create(cls, endpoint: pf.Platform, name: str, language: str) -> Optional[Qua log.error("Language '%s' does not exist, quality profile creation aborted") return None log.debug("Creating quality profile '%s' of language '%s'", name, language) - try: - endpoint.post(QualityProfile.API[c.CREATE], params={"name": name, "language": language}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating quality profile '{language}:{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(f"{language}:{name}", 
e.response.text) + endpoint.post(QualityProfile.API[c.CREATE], params={"name": name, "language": language}) return cls.read(endpoint=endpoint, name=name, language=language) @classmethod @@ -155,11 +149,7 @@ def clone(cls, endpoint: pf.Platform, name: str, language: str, original_qp_name raise exceptions.ObjectNotFound(f"{language}:{original_qp_name}", f"Quality profile {language}:{original_qp_name} not found") original_qp = l[0] log.debug("Found QP to clone: %s", str(original_qp)) - try: - endpoint.post("qualityprofiles/copy", params={"toName": name, "fromKey": original_qp.key}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"cloning {str(original_qp)} into name '{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(f"{language}:{name}", e.response.text) + endpoint.post("qualityprofiles/copy", params={"toName": name, "fromKey": original_qp.key}) return cls.read(endpoint=endpoint, name=name, language=language) @classmethod @@ -277,7 +267,7 @@ def rules(self, use_cache: bool = False) -> dict[str, rules.Rule]: # Assume nobody changed QP during execution return self._rules rule_key_list = rules.search_keys(self.endpoint, activation="true", qprofile=self.key, s="key", languages=self.language) - self._rules = {k: rules.get_object(self.endpoint, k) for k in rule_key_list} + self._rules = {k: rules.Rule.get_object(self.endpoint, k) for k in rule_key_list} return self._rules def activate_rule(self, rule_key: str, severity: Optional[str] = None, **params) -> bool: @@ -294,14 +284,13 @@ def activate_rule(self, rule_key: str, severity: Optional[str] = None, **params) if len(params) > 0: api_params["params"] = ";".join([f"{k}={v}" for k, v in params.items()]) try: - r = self.post("qualityprofiles/activate_rule", params=api_params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"activating rule {rule_key} in {str(self)}", catch_all=True) + ok = 
self.post("qualityprofiles/activate_rule", params=api_params).ok + except exceptions.SonarException: return False if self._rules is None: self._rules = {} - self._rules[rule_key] = rules.get_object(self.endpoint, rule_key) - return r.ok + self._rules[rule_key] = rules.Rule.get_object(self.endpoint, rule_key) + return ok def deactivate_rule(self, rule_key: str) -> bool: """Deactivates a rule in the quality profile @@ -312,11 +301,9 @@ def deactivate_rule(self, rule_key: str) -> bool: """ log.debug("Deactivating rule %s in %s", rule_key, str(self)) try: - r = self.post("qualityprofiles/deactivate_rule", params={"key": self.key, "rule": rule_key}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"deactivating rule {rule_key} in {str(self)}", catch_all=True) + return self.post("qualityprofiles/deactivate_rule", params={"key": self.key, "rule": rule_key}).ok + except exceptions.SonarException: return False - return r.ok def deactivate_rules(self, ruleset: list[str]) -> bool: """Deactivates a list of rules in the quality profile @@ -325,7 +312,7 @@ def deactivate_rules(self, ruleset: list[str]) -> bool: """ ok = True for r_key in ruleset: - ok = ok and self.deactivate_rule(rule_key=r_key) + ok = self.deactivate_rule(rule_key=r_key) and ok self.rules(use_cache=False) return ok @@ -442,7 +429,7 @@ def rule_impacts(self, rule_key: str, substitute_with_default: bool = True) -> d :return: The severities of the rule in the quality profile :rtype: dict[str, str] """ - return rules.get_object(self.endpoint, rule_key).impacts(self.key, substitute_with_default=substitute_with_default) + return rules.Rule.get_object(self.endpoint, rule_key).impacts(self.key, substitute_with_default=substitute_with_default) def __process_rules_diff(self, rule_set: dict[str:str]) -> dict[str:str]: diff_rules = {} @@ -801,8 +788,7 @@ def get_object(endpoint: pf.Platform, name: str, language: str) -> Optional[Qual :return: The quality profile object, of None if not found """ 
get_list(endpoint) - o = QualityProfile.CACHE.get(name, language, endpoint.local_url) - if not o: + if not (o := QualityProfile.CACHE.get(name, language, endpoint.local_url)): raise exceptions.ObjectNotFound(name, message=f"Quality Profile '{language}:{name}' not found") return o diff --git a/sonar/rules.py b/sonar/rules.py index ec3ca9583..510ecc9c5 100644 --- a/sonar/rules.py +++ b/sonar/rules.py @@ -29,8 +29,6 @@ import concurrent.futures from threading import Lock from typing import Optional -from http import HTTPStatus -from requests import RequestException import sonar.logging as log import sonar.sqobject as sq @@ -189,15 +187,16 @@ def __init__(self, endpoint: platform.Platform, key: str, data: types.ApiPayload @classmethod def get_object(cls, endpoint: platform.Platform, key: str) -> Rule: - """Returns a rule object from the cache or from the platform itself""" - o = Rule.CACHE.get(key, endpoint.local_url) - if o: + """Returns a rule object from it key, taken from the cache or from the platform itself + + :param Platform endpoint: The SonarQube reference + :param str key: The rule key + :return: The Rule object corresponding to the input rule key + :raises: ObjectNotFound if rule does not exist + """ + if o := Rule.CACHE.get(key, endpoint.local_url): return o - try: - r = endpoint.get(Rule.API[c.READ], params={"key": key, "actives": "true"}) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"getting rule {key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(key=key, message=f"Rule key '{key}' does not exist") + r = endpoint.get(Rule.API[c.READ], params={"key": key, "actives": "true"}) return Rule(endpoint=endpoint, key=key, data=json.loads(r.text)["rule"]) @classmethod @@ -258,10 +257,9 @@ def refresh(self, use_cache: bool = True) -> bool: try: data = json.loads(self.get(Rule.API[c.READ], params={"key": self.key, "actives": "true"}).text) - except (ConnectionError, RequestException) as e: - 
utilities.handle_error(e, f"Reading {self}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: Rule.CACHE.pop(self) - raise exceptions.ObjectNotFound(key=self.key, message=f"{self} does not exist") + raise self.sq_json.update(data["rule"]) self.sq_json["actives"] = data["actives"].copy() return True @@ -411,8 +409,8 @@ def search_keys(endpoint: platform.Platform, **params) -> list[str]: data = json.loads(endpoint.get(Rule.API[c.SEARCH], params=new_params).text) nbr_pages = utilities.nbr_pages(data) rule_list += [r[Rule.SEARCH_KEY_FIELD] for r in data[Rule.SEARCH_RETURN_FIELD]] - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, "searching rules", catch_all=True) + except exceptions.SonarException: + pass return rule_list @@ -450,18 +448,6 @@ def get_list(endpoint: platform.Platform, use_cache: bool = True, **params) -> d return rule_list -def get_object(endpoint: platform.Platform, key: str) -> Optional[Rule]: - """Returns a Rule object from its key - :return: The Rule object corresponding to the input rule key, or None if not found - :param str key: The rule key - :rtype: Rule or None - """ - try: - return Rule.get_object(key=key, endpoint=endpoint) - except exceptions.ObjectNotFound: - return None - - def export(endpoint: platform.Platform, export_settings: types.ConfigSettings, **kwargs) -> types.ObjectJsonRepr: """Returns a JSON export of all rules""" log.info("Exporting rules") diff --git a/sonar/settings.py b/sonar/settings.py index 28ff49e1e..d64deb289 100644 --- a/sonar/settings.py +++ b/sonar/settings.py @@ -25,8 +25,6 @@ import re import json from typing import Union, Optional -from http import HTTPStatus -from requests import HTTPError, RequestException import sonar.logging as log import sonar.platform as pf @@ -115,6 +113,7 @@ r"^sonar\.auth\..*\.organizations$", r"^sonar\.azureresourcemanager\.file\.identifier$", r"^sonar\.java\.jvmframeworkconfig\.file\.patterns$", + 
r"^sonar\.auth\.gitlab\.allowedGroups", ) VALID_SETTINGS = set() @@ -260,24 +259,23 @@ def set(self, value: any) -> bool: log.debug("Setting %s to value '%s'", str(self), str(value)) params = {"key": self.key, "component": self.component.key if self.component else None} | encode(self, value) try: - ok = self.post(Setting.API[c.CREATE], params=params).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting setting '{self.key}' of {str(self.component)}", catch_all=True) + if ok := self.post(Setting.API[c.CREATE], params=params).ok: + self.value = value + except exceptions.SonarException: return False else: - self.value = value return ok def reset(self) -> bool: log.info("Resetting %s", str(self)) params = {"keys": self.key} | {} if not self.component else {"component": self.component.key} try: - r = self.post("settings/reset", params=params) + ok = self.post("settings/reset", params=params).ok self.refresh() - return r.ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"resetting setting '{self.key}' of {str(self.component)}", catch_all=True) + except exceptions.SonarException: return False + else: + return ok def to_json(self, list_as_csv: bool = True) -> types.ObjectJsonRepr: val = self.value @@ -290,7 +288,7 @@ def to_json(self, list_as_csv: bool = True) -> types.ObjectJsonRepr: break if val is None: val = "" - log.debug("JSON of %s = %s", self, {self.key: val}) + # log.debug("JSON of %s = %s", self, {self.key: val}) return {self.key: val} def definition(self) -> Optional[dict[str, str]]: @@ -469,17 +467,11 @@ def get_new_code_period(endpoint: pf.Platform, project_or_branch: object) -> Set def set_new_code_period(endpoint: pf.Platform, nc_type: str, nc_value: str, project_key: str = None, branch: str = None) -> bool: """Sets the new code period at global level or for a project""" log.debug("Setting new code period for project '%s' branch '%s' to value '%s = %s'", str(project_key), str(branch), str(nc_type), 
str(nc_value)) - try: - if endpoint.is_sonarcloud(): - ok = endpoint.post(Setting.API[c.CREATE], params={"key": "sonar.leak.period.type", "value": nc_type, "project": project_key}).ok - ok = ok and endpoint.post(Setting.API[c.CREATE], params={"key": "sonar.leak.period", "value": nc_value, "project": project_key}).ok - else: - ok = endpoint.post(Setting.API["NEW_CODE_SET"], params={"type": nc_type, "value": nc_value, "project": project_key, "branch": branch}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting new code period of {project_key}", catch_all=True) - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.BAD_REQUEST: - raise exceptions.UnsupportedOperation(f"Can't set project new code period: {e.response.text}") - return False + if endpoint.is_sonarcloud(): + ok = endpoint.post(Setting.API[c.CREATE], params={"key": "sonar.leak.period.type", "value": nc_type, "project": project_key}).ok + ok = ok and endpoint.post(Setting.API[c.CREATE], params={"key": "sonar.leak.period", "value": nc_value, "project": project_key}).ok + else: + ok = endpoint.post(Setting.API["NEW_CODE_SET"], params={"type": nc_type, "value": nc_value, "project": project_key, "branch": branch}).ok return ok @@ -501,23 +493,16 @@ def get_visibility(endpoint: pf.Platform, component: object) -> str: def set_visibility(endpoint: pf.Platform, visibility: str, component: object = None) -> bool: """Sets the platform global default visibility or component visibility""" - try: - if component: - log.debug("Setting setting '%s' of %s to value '%s'", COMPONENT_VISIBILITY, str(component), visibility) - return endpoint.post("projects/update_visibility", params={"project": component.key, "visibility": visibility}).ok - else: - log.debug("Setting setting '%s' to value '%s'", PROJECT_DEFAULT_VISIBILITY, str(visibility)) - return endpoint.post("projects/update_default_visibility", params={"projectVisibility": visibility}).ok - except (ConnectionError, 
RequestException) as e: - util.handle_error(e, f"setting comp or global visibility of {str(component)}", catch_all=True) - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.BAD_REQUEST: - raise exceptions.UnsupportedOperation(f"Can't set comp or global visibility of {str(component)}: {e.response.text}") - return False + if component: + log.debug("Setting setting '%s' of %s to value '%s'", COMPONENT_VISIBILITY, str(component), visibility) + return endpoint.post("projects/update_visibility", params={"project": component.key, "visibility": visibility}).ok + else: + log.debug("Setting setting '%s' to value '%s'", PROJECT_DEFAULT_VISIBILITY, str(visibility)) + return endpoint.post("projects/update_default_visibility", params={"projectVisibility": visibility}).ok def set_setting(endpoint: pf.Platform, key: str, value: any, component: object = None) -> bool: """Sets a setting to a particular value""" - try: log.debug("Setting %s with value %s (for component %s)", key, value, component) s = get_object(endpoint=endpoint, key=key, component=component) @@ -525,13 +510,11 @@ def set_setting(endpoint: pf.Platform, key: str, value: any, component: object = log.warning("Setting '%s' does not exist on target platform, it cannot be set", key) return False s.set(value) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting setting '{key}' of {str(component)}", catch_all=True) - return False - except exceptions.UnsupportedOperation as e: + except exceptions.SonarException as e: log.error("Setting '%s' cannot be set: %s", key, e.message) return False - return True + else: + return True def decode(setting_key: str, setting_value: any) -> any: diff --git a/sonar/sqobject.py b/sonar/sqobject.py index f550cf758..bb28a9c80 100644 --- a/sonar/sqobject.py +++ b/sonar/sqobject.py @@ -29,7 +29,6 @@ from http import HTTPStatus import concurrent.futures import requests -from requests import RequestException import sonar.logging as log from 
sonar.util import types, cache @@ -40,10 +39,12 @@ class SqObject(object): """Abstraction of Sonar objects""" - CACHE = cache.Cache + CACHE = cache.Cache() API = {c.SEARCH: None} def __init__(self, endpoint: object, key: str) -> None: + if not self.__class__.CACHE: + self.__class__.CACHE.set_class(self.__class__) self.key = key #: Object unique key (unique in its class) self.endpoint = endpoint #: Reference to the SonarQube platform self.concerned_object = None @@ -113,7 +114,11 @@ def get( Typically, Error 404 Not found may be expected sometimes so this can avoid logging an error for 404 :return: The request response """ - return self.endpoint.get(api=api, params=params, data=data, mute=mute, **kwargs) + try: + return self.endpoint.get(api=api, params=params, data=data, mute=mute, **kwargs) + except exceptions.ObjectNotFound: + self.__class__.CACHE.clear() + raise def post( self, @@ -131,7 +136,11 @@ def post( :type mute: tuple, optional :return: The request response """ - return self.endpoint.post(api=api, params=params, mute=mute, **kwargs) + try: + return self.endpoint.post(api=api, params=params, mute=mute, **kwargs) + except exceptions.ObjectNotFound: + self.__class__.CACHE.clear() + raise def patch( self, @@ -149,7 +158,11 @@ def patch( :type mute: tuple, optional :return: The request response """ - return self.endpoint.patch(api=api, params=params, mute=mute, **kwargs) + try: + return self.endpoint.patch(api=api, params=params, mute=mute, **kwargs) + except exceptions.ObjectNotFound: + self.__class__.CACHE.clear() + raise def delete(self) -> bool: """Deletes an object, returns whether the operation succeeded""" @@ -159,9 +172,6 @@ def delete(self) -> bool: if ok: log.info("Removing from %s cache", str(self.__class__.__name__)) self.__class__.CACHE.pop(self) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"deleting {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(self.key, 
f"{str(self)} not found") except (AttributeError, KeyError): raise exceptions.UnsupportedOperation(f"Can't delete {self.__class__.__name__.lower()}s") return ok @@ -176,15 +186,14 @@ def set_tags(self, tags: list[str]) -> bool: tags = list(set(utilities.csv_to_list(tags))) log.info("Settings tags %s to %s", tags, str(self)) try: - r = self.post(self.__class__.API[c.SET_TAGS], params={**self.api_params(c.SET_TAGS), "tags": utilities.list_to_csv(tags)}) - if r.ok: + if ok := self.post(self.__class__.API[c.SET_TAGS], params={**self.api_params(c.SET_TAGS), "tags": utilities.list_to_csv(tags)}).ok: self._tags = sorted(tags) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"setting tags of {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + except exceptions.SonarException: return False except (AttributeError, KeyError): raise exceptions.UnsupportedOperation(f"Can't set tags on {self.__class__.__name__.lower()}s") - return r.ok + else: + return ok def get_tags(self, **kwargs) -> list[str]: """Returns object tags""" @@ -199,7 +208,7 @@ def get_tags(self, **kwargs) -> list[str]: data = json.loads(self.get(api, params=self.get_tags_params()).text) self.sq_json.update(data["component"]) self._tags = self.sq_json["tags"] - except (ConnectionError, RequestException): + except exceptions.SonarException: self._tags = [] return self._tags diff --git a/sonar/tasks.py b/sonar/tasks.py index 22b27e1d4..aa1c9a312 100644 --- a/sonar/tasks.py +++ b/sonar/tasks.py @@ -26,12 +26,11 @@ import json import re -from requests import RequestException - import sonar.logging as log import sonar.sqobject as sq import sonar.platform as pf +from sonar import exceptions import sonar.utilities as util from sonar.audit.rules import get_rule, RuleId from sonar.audit.problem import Problem @@ -459,9 +458,8 @@ def search(endpoint: pf.Platform, only_current: bool = False, component_key: str try: data = json.loads(endpoint.get("ce/activity", params=params).text) 
return [Task(endpoint=endpoint, task_id=t["id"], data=t) for t in data["tasks"]] - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting background tasks of component {component_key}", catch_all=True) - return [] + except exceptions.SonarException: + return [] def search_all_last(endpoint: pf.Platform) -> list[Task]: diff --git a/sonar/tokens.py b/sonar/tokens.py index ccd95e03e..9218079e9 100644 --- a/sonar/tokens.py +++ b/sonar/tokens.py @@ -26,14 +26,11 @@ import json import datetime -from http import HTTPStatus -from requests import RequestException import sonar.logging as log import sonar.sqobject as sq import sonar.platform as pf import sonar.utilities as util -from sonar import exceptions from sonar.util import types, cache, constants as c from sonar.audit.problem import Problem from sonar.audit.rules import get_rule, RuleId @@ -69,11 +66,7 @@ def create(cls, endpoint: pf.Platform, login: str, name: str) -> UserToken: :param login: User for which the token must be created :param name: Token name """ - try: - data = json.loads(endpoint.post(UserToken.API[c.CREATE], {"name": name, "login": login}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating token '{name}' for user '{login}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(name, e.response.text) + data = json.loads(endpoint.post(UserToken.API[c.CREATE], {"name": name, "login": login}).text) return UserToken(endpoint=endpoint, login=data["login"], json_data=data, name=name) def __str__(self) -> str: diff --git a/sonar/users.py b/sonar/users.py index a8ed9e8b7..2697fa685 100644 --- a/sonar/users.py +++ b/sonar/users.py @@ -27,7 +27,6 @@ import datetime as dt import json -from http import HTTPStatus from requests import RequestException import sonar.logging as log @@ -126,11 +125,7 @@ def create(cls, endpoint: pf.Platform, login: str, name: str, is_local: bool = T params = {"login": login, "local": 
str(is_local).lower(), "name": name} if is_local: params["password"] = password if password else login - try: - endpoint.post(User.api_for(c.CREATE, endpoint), params=params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating user '{login}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(login, util.sonar_error(e.response)) + endpoint.post(User.api_for(c.CREATE, endpoint), params=params) return cls.get_object(endpoint=endpoint, login=login) @classmethod @@ -143,13 +138,11 @@ def get_object(cls, endpoint: pf.Platform, login: str) -> User: :return: The user object :rtype: User """ - o = User.CACHE.get(login, endpoint.local_url) - if o: + if o := User.CACHE.get(login, endpoint.local_url): return o log.debug("Getting user '%s'", login) - for k, o in search(endpoint, params={"q": login}).items(): - if k == login: - return o + if user := next((o for k, o in search(endpoint, params={"q": login}).items() if k == login), None): + return user raise exceptions.ObjectNotFound(login, f"User '{login}' not found") @classmethod @@ -159,19 +152,15 @@ def get_object_by_id(cls, endpoint: pf.Platform, id: str) -> User: :param endpoint: Reference to the SonarQube platform :param id: User id :raises ObjectNotFound: if id not found - :raises UnsuppoertedOperation: If SonarQube version < 10.4 + :raises UnsupportedOperation: If SonarQube version < 10.4 :return: The user object :rtype: User """ if endpoint.version() < c.USER_API_V2_INTRO_VERSION: raise exceptions.UnsupportedOperation("Get by ID is an APIv2 features, staring from SonarQube 10.4") log.debug("Getting user id '%s'", id) - try: - data = json.loads(endpoint.get(f"/api/v2/users-management/users/{id}", mute=()).text) - return cls.load(endpoint, data) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting user id '{id}'", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(id, f"User id '{id}' not found") 
+ data = json.loads(endpoint.get(f"/api/v2/users-management/users/{id}", mute=()).text) + return cls.load(endpoint, data) @classmethod def api_for(cls, op: str, endpoint: object) -> Optional[str]: @@ -358,11 +347,11 @@ def delete(self) -> bool: else: ok = self.post(api=User.API_V1[c.DELETE], params=self.api_params(c.DELETE)).ok if ok: - log.info("Removing from %s cache", str(self.__class__.__name__)) - self.__class__.CACHE.pop(self) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"deleting {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") + log.info("Removing from %s cache", str(User.__name__)) + User.CACHE.pop(self) + except exceptions.ObjectNotFound: + User.CACHE.pop(self) + raise return ok def api_params(self, op: str = c.GET) -> types.ApiParams: @@ -526,7 +515,7 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings, **kwargs) for future in concurrent.futures.as_completed(futures): try: problems += future.result(timeout=60) - except (TimeoutError, RequestException) as e: + except (TimeoutError, RequestException, exceptions.SonarException) as e: log.error(f"Exception {str(e)} when auditing {str(futures_map[future])}.") "write_q" in kwargs and kwargs["write_q"].put(problems) log.info("--- Auditing users: END ---") diff --git a/sonar/util/cache.py b/sonar/util/cache.py index d25d2ccb6..9ada5575d 100644 --- a/sonar/util/cache.py +++ b/sonar/util/cache.py @@ -21,19 +21,31 @@ """Cache manager""" from typing import Optional +from sonar import logging as log class Cache(object): """Abstract cache implementation""" def __init__(self) -> None: + """Constructor""" self.objects = {} + self.object_class = None def __len__(self) -> int: """Returns size of cache""" return len(self.objects) def __str__(self) -> str: + """string repr of Cache""" + return "'undefined class' cache" if not self.object_class else f"'{self.object_class.__name__}' cache" + + def 
set_class(self, object_class: object) -> None: + """Defines the class the cache is for""" + self.object_class = object_class + + def contents(self) -> str: + """Returns the cache contents as a string""" return ", ".join([str(o) for o in self.objects.values()]) def put(self, obj: object) -> object: @@ -41,13 +53,19 @@ def put(self, obj: object) -> object: h = hash(obj) if h not in self.objects: self.objects[h] = obj + else: + log.debug("%s already in cache, can't be added again", obj) + # log.debug("PUT %s: %s", self, self.contents()) return self.objects[h] def get(self, *args) -> Optional[object]: + # log.debug("GET %s: %s", self, self.contents()) return self.objects.get(hash(args), None) def pop(self, obj: object) -> Optional[object]: - return self.objects.pop(hash(obj), None) + o = self.objects.pop(hash(obj), None) + log.debug("POP %s: %s", self, self.contents()) + return o def values(self) -> list[object]: return list(self.objects.values()) @@ -59,4 +77,6 @@ def items(self) -> dict[int, object]: return self.objects.items() def clear(self) -> None: + """Clears a cache""" + # log.info("Clearing %s", self) self.objects = {} diff --git a/sonar/utilities.py b/sonar/utilities.py index 35115a623..32c5d0e6f 100644 --- a/sonar/utilities.py +++ b/sonar/utilities.py @@ -36,6 +36,8 @@ from copy import deepcopy import requests +import Levenshtein + import sonar.logging as log from sonar import version, errcodes from sonar.util import types, cache_helper @@ -468,26 +470,16 @@ def open_file(file: str = None, mode: str = "w") -> TextIO: def search_by_name(endpoint: object, name: str, api: str, returned_field: str, extra_params: dict[str, str] = None) -> Union[dict[str, str], None]: """Searches a object by name""" - params = {"q": name} - if extra_params is not None: - params.update(extra_params) + params = {"q": name} | (extra_params or {}) data = json.loads(endpoint.get(api, params=params).text) - for d in data[returned_field]: - if d["name"] == name: - return d - return None 
+ return next((d for d in data[returned_field] if d["name"] == name), None) def search_by_key(endpoint: object, key: str, api: str, returned_field: str, extra_params: Optional[dict[str, str]] = None) -> types.ApiPayload: """Search an object by its key""" - params = {"q": key} - if extra_params is not None: - params.update(extra_params) + params = {"q": key} | (extra_params or {}) data = json.loads(endpoint.get(api, params=params).text) - for d in data[returned_field]: - if d["key"] == key: - return d - return None + return next((d for d in data[returned_field] if d["key"] == key), None) def sonar_error(response: requests.models.Response) -> str: @@ -823,3 +815,11 @@ def flatten(original_dict: dict[str, any]) -> dict[str, any]: else: flat_dict[k] = v return flat_dict + + +def similar_strings(key1: str, key2: str, max_distance: int = 5) -> bool: + """Returns whether 2 project keys are similar, but not equal""" + if key1 == key2: + return False + max_distance = min(len(key1) // 2, len(key2) // 2, max_distance) + return (len(key2) >= 7 and (re.match(key2, key1))) or Levenshtein.distance(key1, key2, score_cutoff=6) <= max_distance diff --git a/sonar/webhooks.py b/sonar/webhooks.py index 8e7f139d8..0e47e4fdc 100644 --- a/sonar/webhooks.py +++ b/sonar/webhooks.py @@ -24,8 +24,6 @@ from typing import Optional import json -from http import HTTPStatus -from requests import RequestException import sonar.logging as log from sonar import platform as pf, exceptions @@ -44,7 +42,7 @@ class WebHook(sq.SqObject): """ CACHE = cache.Cache() - API = {c.CREATE: "webhooks/create", c.READ: "webhooks/list", c.UPDATE: "webhooks/update", c.LIST: "webhooks/list"} + API = {c.CREATE: "webhooks/create", c.READ: "webhooks/list", c.UPDATE: "webhooks/update", c.LIST: "webhooks/list", c.DELETE: "webhooks/delete"} SEARCH_KEY_FIELD = "key" SEARCH_RETURN_FIELD = "webhooks" @@ -72,11 +70,7 @@ def create(cls, endpoint: pf.Platform, name: str, url: str, secret: Optional[str """ log.info("Creating webhook 
name %s, url %s project %s", name, url, str(project)) params = util.remove_nones({"name": name, "url": url, "secret": secret, "project": project}) - try: - endpoint.post(WebHook.API[c.CREATE], params=params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating Webhook '{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(name, e.response.text) + endpoint.post(WebHook.API[c.CREATE], params=params) o = cls(endpoint, name=name, url=url, secret=secret, project=project) o.refresh() return o @@ -103,12 +97,9 @@ def get_object(cls, endpoint: pf.Platform, name: str, project_key: Optional[str] return o try: whs = list(get_list(endpoint, project_key).values()) - return next((wh for wh in whs if wh.name == name)) - except RequestException as e: - util.handle_error(e, f"Getting webhook '{name}' of project key '{project_key}'", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(project_key, f"Webhook '{name}' of project '{project_key}' not found") - except StopIteration: - raise exceptions.ObjectNotFound(project_key, f"Webhook '{name}' of project '{project_key}' not found") + return next(wh for wh in whs if wh.name == name) + except StopIteration as e: + raise exceptions.ObjectNotFound(project_key, f"Webhook '{name}' of project '{project_key}' not found") from e def __str__(self) -> str: return f"webhook '{self.name}'" @@ -127,24 +118,25 @@ def refresh(self) -> None: if wh_data is None: wh_name = str(self) name = self.name - self.delete() + WebHook.CACHE.pop(self) raise exceptions.ObjectNotFound(name, f"{wh_name} not found") self.reload(wh_data) def reload(self, data: types.ApiPayload) -> None: + """Reloads a WebHook from the payload gotten from SonarQube""" log.debug("Loading %s with %s", str(self), str(data)) self.sq_json = self.sq_json or {} | data self.name = data["name"] self.key = data["key"] self.webhook_url = data["url"] - self.secret = data.get("secret", None) + 
self.secret = data.get("secret", None) or self.secret self.last_delivery = data.get("latestDelivery", None) def url(self) -> str: """Returns the object permalink""" return f"{self.base_url(local=False)}/admin/webhooks" - def update(self, **kwargs) -> bool: + def update(self, **kwargs: str) -> bool: """Updates a webhook with new properties (name, url, secret) :param kwargs: dict - "url", "name", "secret" are the looked up keys @@ -173,6 +165,11 @@ def to_json(self, full: bool = False) -> dict[str, any]: """ return util.filter_export(self.sq_json, _IMPORTABLE_PROPERTIES, full) + def api_params(self, op: str) -> types.ApiParams: + """Returns the std api params to pass for a given webhook""" + ops = {c.READ: {"webhook": self.key}} + return ops[op] if op and op in ops else ops[c.READ] + def search(endpoint: pf.Platform, params: types.ApiParams = None) -> dict[str, WebHook]: """Searches webhooks @@ -204,7 +201,6 @@ def export(endpoint: pf.Platform, project_key: str = None, full: bool = False) - def import_config(endpoint: pf.Platform, data: types.ObjectJsonRepr, project_key: Optional[str] = None) -> None: """Imports a set of webhooks defined from a JSON description""" - log.debug("Importing webhooks %s for %s", str(data), str(project_key)) current_wh = get_list(endpoint, project_key=project_key) existing_webhooks = {wh.name: k for k, wh in current_wh.items()} diff --git a/test/test-sync.sh b/test/test-sync.sh index ef2b221f4..520310608 100755 --- a/test/test-sync.sh +++ b/test/test-sync.sh @@ -4,9 +4,9 @@ for proj in source target do curl -X POST -u "$SONAR_TOKEN:" "$SONAR_HOST_URL/api/projects/delete?project=$proj" opts=("-Dsonar.projectKey=$proj" "-Dsonar.projectName=$proj") - scan.sh "${opts[@]}" "$@" + conf/run_all.sh "${opts[@]}" "$@" for branch in release-1.x release-2.x do - scan.sh "${opts[@]}" "$@" "-Dsonar.branch.name=$branch" + conf/run_all.sh "${opts[@]}" "$@" "-Dsonar.branch.name=$branch" done done diff --git a/test/unit/test_apps.py b/test/unit/test_apps.py 
index eb68ef2db..a481dd0e4 100644 --- a/test/unit/test_apps.py +++ b/test/unit/test_apps.py @@ -76,7 +76,7 @@ def test_get_object_non_existing() -> None: return with pytest.raises(exceptions.ObjectNotFound) as e: _ = App.get_object(endpoint=tutil.SQ, key=NON_EXISTING_KEY) - assert str(e.value).endswith(f"Application key '{NON_EXISTING_KEY}' not found") + assert str(e.value).endswith(f"Application '{NON_EXISTING_KEY}' not found") def test_exists(get_test_app: Generator[App]) -> None: diff --git a/test/unit/test_branches.py b/test/unit/test_branches.py new file mode 100644 index 000000000..e89c80145 --- /dev/null +++ b/test/unit/test_branches.py @@ -0,0 +1,193 @@ +# +# sonar-tools tests +# Copyright (C) 2025 Olivier Korach +# mailto:olivier.korach AT gmail DOT com +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# + +"""applications tests""" + +import pytest + +import utilities as tutil +from sonar import projects, branches, exceptions +import sonar.util.constants as c + +SUPPORTED_EDITIONS = (c.DE, c.EE, c.DCE) + + +def verify_branch_support(func: callable, **kwargs) -> bool: + if kwargs["concerned_object"].endpoint.edition() not in SUPPORTED_EDITIONS: + with pytest.raises(exceptions.UnsupportedOperation): + _ = func(**kwargs) + return False + return True + + +def test_get_object() -> None: + """Test get_object and verify that if requested twice the same object is returned""" + + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + obj = branches.Branch.get_object(concerned_object=project, branch_name="develop") + assert str(obj) == f"branch 'develop' of project '{project.key}'" + obj.refresh() + + +def test_not_found() -> None: + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + with pytest.raises(exceptions.ObjectNotFound): + obj = branches.Branch.get_object(concerned_object=project, branch_name="non-existing") + + obj = branches.Branch.get_object(concerned_object=project, branch_name="develop") + obj.name = "non-existing2" + with pytest.raises(exceptions.ObjectNotFound): + obj.refresh() + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + obj.concerned_object.key = "non-existing2" + with pytest.raises(exceptions.ObjectNotFound): + obj.new_code() + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + +def test_is_main_is_kept(): + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + obj = 
branches.Branch.get_object(concerned_object=project, branch_name="develop") + obj._keep_when_inactive = None + obj.refresh() + assert obj.is_kept_when_inactive() in (True, False) + obj._is_main = None + assert obj.is_main() in (True, False) + + +def test_set_as_main(): + """test_set_as_main""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + main_br_name = project.main_branch_name() + main_br = branches.Branch.get_object(concerned_object=project, branch_name=main_br_name) + assert main_br.is_main() + assert not dev_br.is_main() + + if tutil.SQ.version() < (10, 0, 0): + with pytest.raises(exceptions.UnsupportedOperation): + dev_br.set_as_main() + return + + assert dev_br.set_as_main() + assert not main_br.is_main() + assert dev_br.is_main() + + assert main_br.set_as_main() + + main_br.name = "non-existing-main" + with pytest.raises(exceptions.ObjectNotFound): + main_br.set_as_main() + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + +def test_set_keep_as_inactive(): + """test_set_keep_as_inactive""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + master_br = branches.Branch.get_object(concerned_object=project, branch_name="master") + assert dev_br.is_kept_when_inactive() + assert master_br.is_kept_when_inactive() + + assert dev_br.set_keep_when_inactive(False) + assert not dev_br.is_kept_when_inactive() + assert master_br.is_kept_when_inactive() + + assert dev_br.set_keep_when_inactive(True) + + dev_br.name = "non-existing-develop" + with pytest.raises(exceptions.ObjectNotFound): + 
dev_br.set_keep_when_inactive(True) + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + +def test_rename(): + """test_rename""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + main_br_name = project.main_branch_name() + main_br = branches.Branch.get_object(concerned_object=project, branch_name=main_br_name) + with pytest.raises(exceptions.UnsupportedOperation): + dev_br.rename("release") + + new_name = "gold" + assert main_br.rename(new_name) + assert not main_br.rename(new_name) + + new_br = branches.Branch.get_object(concerned_object=project, branch_name=new_name) + assert new_br is main_br + assert main_br.rename(main_br_name) + assert new_br.name == main_br_name + + +def test_get_findings(): + """test_get_findings""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + assert len(dev_br.get_findings()) > 0 + + dev_br.name = "non-existing-dev2" + with pytest.raises(exceptions.ObjectNotFound): + dev_br.get_findings() + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + +def test_audit(): + """test_audit_off""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + assert len(dev_br.audit({"audit.project.branches": False})) == 0 + + dev_br.name = "non-existing-dev3" + assert len(dev_br.audit({})) == 0 + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + 
+def test_exists(): + """test_exists""" + if tutil.SQ.edition() == c.CE: + with pytest.raises(exceptions.UnsupportedOperation): + branches.exists(tutil.SQ, branch_name="develop", project_key=tutil.LIVE_PROJECT) + else: + assert branches.exists(tutil.SQ, branch_name="develop", project_key=tutil.LIVE_PROJECT) + assert not branches.exists(tutil.SQ, branch_name="foobar", project_key=tutil.LIVE_PROJECT) diff --git a/test/unit/test_findings.py b/test/unit/test_findings.py index 3a5cc5d8f..52efb5c49 100644 --- a/test/unit/test_findings.py +++ b/test/unit/test_findings.py @@ -56,7 +56,7 @@ f"--{opt.KEY_REGEXP} training:security -{opt.BRANCH_REGEXP_SHORT} main", f"--{opt.USE_FINDINGS} -{opt.KEY_REGEXP_SHORT} ({tutil.PROJECT_0}|{tutil.PROJECT_1})", f"--{opt.APPS} -{opt.KEY_REGEXP_SHORT} APP_TEST --{opt.BRANCH_REGEXP} .+", - f"--{opt.PORTFOLIOS} -{opt.KEY_REGEXP_SHORT} Banking -{opt.REPORT_FILE_SHORT} {tutil.CSV_FILE}", + # See issue #2011 f"--{opt.PORTFOLIOS} -{opt.KEY_REGEXP_SHORT} Banking -{opt.REPORT_FILE_SHORT} {tutil.CSV_FILE}", f"-{opt.KEY_REGEXP_SHORT} {tutil.PROJECT_0} -{opt.BRANCH_REGEXP_SHORT} .+", f"--{opt.STATUSES} OPEN,CLOSED --{opt.SEVERITIES} {idefs.STD_SEVERITY_BLOCKER},{idefs.STD_SEVERITY_CRITICAL}", ] @@ -271,6 +271,7 @@ def test_findings_export(csv_file: Generator[str]) -> None: def test_findings_export_long(csv_file: Generator[str]) -> None: """test_findings_export_long""" + pytest.skip("Test too long") cmd_csv = f"{CMD} --{opt.REPORT_FILE} {csv_file}" for opts in __GOOD_OPTS_LONG: assert tutil.run_cmd(findings_export.main, f"{cmd_csv} {opts}") == e.OK @@ -301,7 +302,7 @@ def test_issues_count_3() -> None: def test_search_issues_by_project() -> None: """test_search_issues_by_project""" nb_issues = len(issues.search_by_project(endpoint=tutil.SQ, project_key=tutil.LIVE_PROJECT, search_findings=True)) - assert 100 <= nb_issues <= 3500 + assert 100 <= nb_issues <= 3700 nb_issues = len(issues.search_by_project(endpoint=tutil.SQ, project_key=tutil.LIVE_PROJECT, 
params={"resolved": "false"})) assert nb_issues < 1800 nb_issues = len(issues.search_by_project(endpoint=tutil.SQ, project_key=None)) diff --git a/test/unit/test_findings_sync.py b/test/unit/test_findings_sync.py index db2aaa10e..44159a813 100644 --- a/test/unit/test_findings_sync.py +++ b/test/unit/test_findings_sync.py @@ -36,7 +36,10 @@ CMD = "sonar-findings-sync.py" -PLAT_OPTS = f"{tutil.SQS_OPTS} -U {os.getenv('SONAR_HOST_URL_TEST')} -T {os.getenv('SONAR_TOKEN_SYNC_USER')}" +TEST_URL = os.getenv("SONAR_HOST_URL_TEST") +TEST_TOKEN = os.getenv("SONAR_TOKEN_SYNC_USER") +PLAT_OPTS = f"{tutil.SQS_OPTS} --{opt.URL_TARGET} {TEST_URL} --{opt.TOKEN_TARGET} {TEST_TOKEN}" +TEST_OPTS = f"--{opt.URL} {TEST_URL} --{opt.TOKEN} {TEST_TOKEN} --{opt.KEY_REGEXP} TESTSYNC" SC_PLAT_OPTS = f"{tutil.SQS_OPTS} -U https://sonarcloud.io -T {os.getenv('SONAR_TOKEN_SONARCLOUD')} -O okorach" SYNC_OPTS = f"-{opt.KEY_REGEXP_SHORT} {tutil.LIVE_PROJECT} -K TESTSYNC" @@ -46,13 +49,25 @@ def test_sync_help() -> None: assert tutil.run_cmd(findings_sync.main, f"{CMD} -h") == e.ARGS_ERROR -def test_sync_proj(json_file: Generator[str]) -> None: - """test_sync_proj""" +def test_sync_2_proj_all_branches(json_file: Generator[str]) -> None: + """test_sync_2_proj_all_branches""" assert tutil.run_cmd(findings_sync.main, f"{CMD} {PLAT_OPTS} {SYNC_OPTS} -{opt.REPORT_FILE_SHORT} {json_file}") == e.OK -def test_sync_branch(json_file: Generator[str]) -> None: - """test_sync_branch""" +def test_sync_same_proj_all_branches(json_file: Generator[str]) -> None: + """test_sync_same_proj_all_branches""" + # Project sync across all branches of a given project + pytest.skip("No yet supported") + assert tutil.run_cmd(findings_sync.main, f"{CMD} {TEST_OPTS} --{opt.REPORT_FILE} {json_file}") == e.OK + + +def test_sync_same_proj_2_branches(json_file: Generator[str]) -> None: + """test_sync_same_proj_2_branches""" + assert tutil.run_cmd(findings_sync.main, f"{CMD} {TEST_OPTS} -b main -B develop -K TESTSYNC 
--{opt.REPORT_FILE} {json_file}") == e.OK + + +def test_sync_2_proj_branches(json_file: Generator[str]) -> None: + """test_sync_2_proj_branches""" code = e.UNSUPPORTED_OPERATION if tutil.SQ.edition() == c.CE else e.OK assert tutil.run_cmd(findings_sync.main, f"{CMD} {PLAT_OPTS} {SYNC_OPTS} -b master -B main -{opt.REPORT_FILE_SHORT} {json_file}") == code if tutil.SQ.edition() == c.CE: @@ -61,4 +76,4 @@ def test_sync_branch(json_file: Generator[str]) -> None: def test_sync_scloud(json_file: Generator[str]) -> None: """test_sync_scloud""" - assert tutil.run_cmd(findings_sync.main, f"{CMD} {SC_PLAT_OPTS} {SYNC_OPTS} --threads 16 -{opt.REPORT_FILE_SHORT} {json_file}") == e.OK + assert tutil.run_cmd(findings_sync.main, f"{CMD} {SC_PLAT_OPTS} {SYNC_OPTS} --{opt.NBR_THREADS} 16 -{opt.REPORT_FILE_SHORT} {json_file}") == e.OK diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index f68e711b9..91f232bae 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -24,6 +24,8 @@ from datetime import datetime, timedelta import pytest +from requests.exceptions import ConnectionError + import utilities as tutil from sonar import issues, exceptions, logging from sonar import utilities as util @@ -96,7 +98,8 @@ def test_set_severity() -> None: assert issue.set_severity(new_sev) issue.refresh() assert issue.severity == new_sev - assert not issue.set_severity("NON_EXISTING") + with pytest.raises(exceptions.UnsupportedOperation): + issue.set_severity("NON_EXISTING") issue.set_severity(old_sev) assert not any(issue.set_mqr_severity(k, v) for k, v in new_impacts.items()) @@ -115,8 +118,10 @@ def test_set_severity() -> None: assert all(issue.set_mqr_severity(k, v) for k, v in new_impacts.items()) issue.refresh() assert issue.impacts == new_impacts - assert not issue.set_mqr_severity("MAINTAINABILITY", "NON_EXISTING") - assert not issue.set_mqr_severity("NON_EXISTING", "HIGH") + with pytest.raises(exceptions.UnsupportedOperation): + 
issue.set_mqr_severity("MAINTAINABILITY", "NON_EXISTING") + with pytest.raises(exceptions.SonarException): + issue.set_mqr_severity("NON_EXISTING", "HIGH") [issue.set_mqr_severity(k, v) for k, v in old_impacts.items()] tutil.SQ.set_mqr_mode(is_mqr) @@ -147,7 +152,8 @@ def test_set_type() -> None: assert issue.set_type(new_type) issue.refresh() assert issue.type == new_type - assert not issue.set_type("NON_EXISTING") + with pytest.raises(exceptions.UnsupportedOperation): + issue.set_type("NON_EXISTING") issue.set_type(old_type) @@ -176,7 +182,7 @@ def test_changelog() -> None: if tutil.SQ.version() < (10, 0, 0): nb_changes = 4 elif tutil.SQ.version() >= (2025, 4, 2): - nb_changes = 14 + nb_changes = 16 elif tutil.SQ.version() >= (25, 1, 0): nb_changes = 8 else: @@ -204,7 +210,7 @@ def test_changelog() -> None: author = None delta = timedelta(days=1) if tutil.SQ.version() >= (2025, 5, 0): - date_change = datetime(2025, 10, 3) + date_change = datetime(2025, 10, 12) elif tutil.SQ.version() >= (10, 0, 0): date_change = datetime(2025, 2, 13) else: @@ -250,10 +256,13 @@ def test_request_error() -> None: """test_request_error""" issues_d = issues.search_by_project(endpoint=tutil.TEST_SQ, project_key=tutil.PROJECT_1) issue = list(issues_d.values())[0] + url = tutil.TEST_SQ.local_url tutil.TEST_SQ.local_url = "http://localhost:3337" - assert not issue.add_comment("Won't work") - - assert not issue.assign("admin") + with pytest.raises(ConnectionError): + issue.add_comment("Won't work") + with pytest.raises(ConnectionError): + issue.assign("admin") + tutil.TEST_SQ.local_url = url def test_transitions() -> None: @@ -262,28 +271,37 @@ def test_transitions() -> None: issue = list(issues_d.values())[0] assert issue.confirm() - assert not issue.confirm() + with pytest.raises(exceptions.UnsupportedOperation): + issue.confirm() assert issue.unconfirm() - assert not issue.unconfirm() + with pytest.raises(exceptions.UnsupportedOperation): + issue.unconfirm() assert 
issue.resolve_as_fixed() - assert not issue.resolve_as_fixed() + with pytest.raises(exceptions.UnsupportedOperation): + issue.resolve_as_fixed() assert issue.reopen() - assert not issue.reopen() + with pytest.raises(exceptions.UnsupportedOperation): + assert not issue.reopen() if tutil.SQ.version() >= c.ACCEPT_INTRO_VERSION: assert issue.accept() - assert not issue.accept() + with pytest.raises(exceptions.UnsupportedOperation): + issue.accept() else: assert issue.mark_as_wont_fix() - assert not issue.mark_as_wont_fix() + with pytest.raises(exceptions.UnsupportedOperation): + issue.mark_as_wont_fix() assert issue.reopen() - assert not issue.reopen() + with pytest.raises(exceptions.UnsupportedOperation): + issue.reopen() assert issue.mark_as_false_positive() - assert not issue.mark_as_false_positive() + with pytest.raises(exceptions.UnsupportedOperation): + issue.mark_as_false_positive() assert issue.reopen() - assert not issue.reopen() + with pytest.raises(exceptions.UnsupportedOperation): + issue.reopen() def test_search_first() -> None: diff --git a/test/unit/test_platform.py b/test/unit/test_platform.py index ccff53100..2bf79555f 100644 --- a/test/unit/test_platform.py +++ b/test/unit/test_platform.py @@ -22,7 +22,7 @@ """platform tests""" import json -from requests import RequestException +import requests.exceptions from datetime import datetime import pytest @@ -82,10 +82,11 @@ def test_wrong_url() -> None: tutil.TEST_SQ.local_url = "http://localhost:3337" tutil.TEST_SQ._sys_info = None - with pytest.raises(RequestException): + with pytest.raises(requests.exceptions.ConnectionError): tutil.TEST_SQ.sys_info() - tutil.TEST_SQ.global_permissions() + with pytest.raises(requests.exceptions.ConnectionError): + tutil.TEST_SQ.global_permissions() def test_set_webhooks() -> None: diff --git a/test/unit/test_projects.py b/test/unit/test_projects.py index 5c261c9e9..e61b1b315 100644 --- a/test/unit/test_projects.py +++ b/test/unit/test_projects.py @@ -54,12 +54,15 @@ def 
test_create_delete() -> None: """test_create_delete""" proj = projects.Project.create(endpoint=tutil.SQ, key=tutil.TEMP_KEY, name="temp") assert proj.key == tutil.TEMP_KEY + assert proj.main_branch_name() == "main" if tutil.SQ.edition() != c.CE: assert proj.main_branch().name == "main" proj.rename_main_branch("foobar") assert proj.main_branch().name == "foobar" else: - assert proj.main_branch_name() == "main" + with pytest.raises(exceptions.UnsupportedOperation): + proj.main_branch() + assert proj.delete() with pytest.raises(exceptions.ObjectNotFound): proj.refresh() @@ -132,6 +135,16 @@ def test_import_sync() -> None: assert proj.import_zip(asynchronous=False).startswith("FAILED") +def test_import_no_zip(get_test_project: Generator[projects.Project]) -> None: + """test_import_no_zip""" + if tutil.SQ.edition() == c.CE: + pytest.skip("No zip import in Community Build") + assert get_test_project.import_zip(asynchronous=False) == "FAILED/ZIP_MISSING" + get_test_project.key = "non-existing" + res = get_test_project.import_zip(asynchronous=False) + assert res.startswith("FAILED/") and "not found" in res + + def test_monorepo() -> None: """test_monorepo""" proj = projects.Project.get_object(endpoint=tutil.SQ, key=tutil.LIVE_PROJECT) @@ -143,10 +156,13 @@ def test_monorepo() -> None: def test_get_findings() -> None: """test_get_findings""" proj = projects.Project.get_object(endpoint=tutil.SQ, key=tutil.LIVE_PROJECT) - assert len(proj.get_findings(branch="non-existing-branch")) == 0 + with pytest.raises(exceptions.ObjectNotFound): + proj.get_findings(branch="non-existing-branch") if tutil.SQ.edition() != c.CE: assert len(proj.get_findings(branch="develop")) > 0 - assert len(proj.get_findings(pr="1")) == 0 + with pytest.raises(exceptions.ObjectNotFound): + proj.get_findings(pr="1") + assert len(proj.get_findings(pr="5")) == 0 def test_count_third_party_issues() -> None: @@ -201,6 +217,11 @@ def test_already_exists() -> None: projects.Project.create(endpoint=tutil.SQ, 
key=tutil.EXISTING_PROJECT, name="name") +def test_exists() -> None: + assert projects.exists(tutil.SQ, tutil.LIVE_PROJECT) + assert not projects.exists(tutil.SQ, "non-existing") + + def test_binding() -> None: """test_binding""" if tutil.SQ.edition() == c.CE: @@ -228,11 +249,13 @@ def test_import_wrong_key(get_test_project: Generator[projects.Project]) -> None """test_import_wrong_key""" proj = get_test_project proj.key = tutil.NON_EXISTING_KEY - expected_exception = exceptions.ObjectNotFound if tutil.SQ.edition() in (c.EE, c.DCE) else exceptions.UnsupportedOperation if tutil.SQ.edition() in (c.EE, c.DCE): - with pytest.raises(expected_exception): + assert proj.import_zip(asynchronous=True) == "FAILED/PROJECT_NOT_FOUND" + assert proj.import_zip(asynchronous=False) == "FAILED/PROJECT_NOT_FOUND" + else: + with pytest.raises(exceptions.UnsupportedOperation): proj.import_zip(asynchronous=True) - with pytest.raises(expected_exception): + with pytest.raises(exceptions.UnsupportedOperation): proj.import_zip(asynchronous=False) @@ -249,7 +272,8 @@ def test_set_links(get_test_project: Generator[projects.Project]) -> None: proj = get_test_project proj.set_links({"links": [{"type": "custom", "name": "google", "url": "https://google.com"}]}) proj.key = tutil.NON_EXISTING_KEY - assert not proj.set_links({"links": [{"type": "custom", "name": "yahoo", "url": "https://yahoo.com"}]}) + with pytest.raises(exceptions.ObjectNotFound): + proj.set_links({"links": [{"type": "custom", "name": "yahoo", "url": "https://yahoo.com"}]}) def test_set_tags(get_test_project: Generator[projects.Project]) -> None: @@ -271,37 +295,43 @@ def test_set_quality_gate(get_test_project: Generator[projects.Project], get_tes qg = get_test_quality_gate assert proj.set_quality_gate(qg.name) assert not proj.set_quality_gate(None) - assert not proj.set_quality_gate(tutil.NON_EXISTING_KEY) + with pytest.raises(exceptions.ObjectNotFound): + proj.set_quality_gate(tutil.NON_EXISTING_KEY) proj.key = 
tutil.NON_EXISTING_KEY - assert not proj.set_quality_gate(qg.name) + with pytest.raises(exceptions.ObjectNotFound): + assert not proj.set_quality_gate(qg.name) def test_ai_code_assurance(get_test_project: Generator[projects.Project]) -> None: """test_ai_code_assurance""" proj = get_test_project - if tutil.SQ.version() >= (10, 7, 0) and tutil.SQ.edition() != c.CE: - proj = get_test_project - assert proj.set_contains_ai_code(True) - assert proj.get_ai_code_assurance() in ( - "CONTAINS_AI_CODE", - "AI_CODE_ASSURED", - "AI_CODE_ASSURANCE_ON", - "AI_CODE_ASSURANCE_OFF", - "AI_CODE_ASSURANCE_PASS", - "AI_CODE_ASSURANCE_FAIL", - "NONE", - ) - assert proj.set_contains_ai_code(False) - assert proj.get_ai_code_assurance() == "NONE" - proj.key = tutil.NON_EXISTING_KEY - assert not proj.set_contains_ai_code(True) - assert proj.get_ai_code_assurance() is None - assert not proj.set_contains_ai_code(False) - assert proj.get_ai_code_assurance() is None - else: + if tutil.SQ.version() < (10, 7, 0) or tutil.SQ.edition() == c.CE: with pytest.raises(exceptions.UnsupportedOperation): proj.get_ai_code_assurance() + return + proj = get_test_project + assert proj.set_contains_ai_code(True) + assert proj.get_ai_code_assurance() in ( + "CONTAINS_AI_CODE", + "AI_CODE_ASSURED", + "AI_CODE_ASSURANCE_ON", + "AI_CODE_ASSURANCE_OFF", + "AI_CODE_ASSURANCE_PASS", + "AI_CODE_ASSURANCE_FAIL", + "NONE", + ) + assert proj.set_contains_ai_code(False) + assert proj.get_ai_code_assurance() == "NONE" + proj.key = tutil.NON_EXISTING_KEY + with pytest.raises(exceptions.ObjectNotFound): + proj.set_contains_ai_code(True) + with pytest.raises(exceptions.ObjectNotFound): + assert proj.get_ai_code_assurance() + with pytest.raises(exceptions.ObjectNotFound): + proj.set_contains_ai_code(False) + with pytest.raises(exceptions.ObjectNotFound): + proj.get_ai_code_assurance() def test_set_quality_profile(get_test_project: Generator[projects.Project], get_test_qp: Generator[qualityprofiles.QualityProfile]) -> None: @@ 
-339,8 +369,10 @@ def test_wrong_key_2(get_test_project: Generator[projects.Project]) -> None: """test_wrong_key""" proj = get_test_project proj.key = tutil.NON_EXISTING_KEY - assert proj.webhooks() is None - assert proj.links() is None + with pytest.raises(exceptions.ObjectNotFound): + _ = proj.webhooks() + with pytest.raises(exceptions.ObjectNotFound): + _ = proj.links() # assert proj.quality_gate() is None with pytest.raises(exceptions.ObjectNotFound): proj.audit({}) diff --git a/test/unit/test_qp.py b/test/unit/test_qp.py index 04ba45e9a..55d8cf9ed 100644 --- a/test/unit/test_qp.py +++ b/test/unit/test_qp.py @@ -159,7 +159,7 @@ def test_import() -> None: languages.Language.CACHE.clear() qualityprofiles.QualityProfile.CACHE.clear() # delete all quality profiles in test - _ = [qp.set_as_default() for qp in qualityprofiles.get_list(tutil.TEST_SQ).values() if qp.name == tutil.SONAR_WAY] + _ = [qp.set_as_default() for qp in qualityprofiles.get_list(tutil.TEST_SQ, use_cache=False).values() if qp.name == tutil.SONAR_WAY] qp_list = {o for o in qualityprofiles.get_list(tutil.TEST_SQ, use_cache=False).values() if not o.is_built_in and not o.is_default} _ = [o.delete() for o in qp_list] with open(f"{tutil.FILES_ROOT}/config.json", "r", encoding="utf-8") as f: diff --git a/test/unit/test_rules.py b/test/unit/test_rules.py index 022463ea8..617723afb 100644 --- a/test/unit/test_rules.py +++ b/test/unit/test_rules.py @@ -70,7 +70,7 @@ def test_rules_misspelled_language_2(csv_file: Generator[str]) -> None: def test_get_rule() -> None: """test_get_rule""" - myrule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + myrule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert str(myrule) == "rule key 'java:S127'" myrule = rules.Rule.load(endpoint=tutil.SQ, key="java:S127", data={}) assert str(myrule) == "rule key 'java:S127'" @@ -78,7 +78,7 @@ def test_get_rule() -> None: def test_set_tags() -> None: """test_set_tags""" - my_rule = 
rules.get_object(endpoint=tutil.SQ, key="java:S127") + my_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert my_rule.set_tags(tutil.TAGS) assert my_rule.tags == sorted(tutil.TAGS) assert my_rule.reset_tags() @@ -87,7 +87,7 @@ def test_set_tags() -> None: def test_set_desc() -> None: """test_set_tags""" - my_rule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + my_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert my_rule.set_description("Blah blah") assert my_rule.custom_desc == "Blah blah" assert my_rule.reset_description() @@ -109,7 +109,7 @@ def test_facets() -> None: def test_get_rule_cache() -> None: """test_get_rule_cache""" - my_rule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + my_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert str(my_rule) == "rule key 'java:S127'" new_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert my_rule is new_rule @@ -143,7 +143,7 @@ def test_export_all() -> None: def test_new_taxo() -> None: """test_new_taxo""" - my_rule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + my_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") if tutil.SQ.version() >= c.MQR_INTRO_VERSION: for qual, sev in my_rule.impacts().items(): assert qual in idefs.MQR_QUALITIES diff --git a/test/unit/test_webhooks.py b/test/unit/test_webhooks.py index 4ed9f9991..2de5dd7d6 100644 --- a/test/unit/test_webhooks.py +++ b/test/unit/test_webhooks.py @@ -85,13 +85,18 @@ def test_export() -> None: def test_create_delete() -> None: """test_create_delete""" - hook = wh.WebHook.create(tutil.SQ, tutil.TEMP_KEY, "http://google.com", "Shhht", tutil.PROJECT_1) + if tutil.SQ.version() >= (10, 0, 0): + with pytest.raises(exceptions.SonarException): + # Secret too short + wh.WebHook.create(tutil.SQ, tutil.TEMP_KEY, "http://google.com", "Shhht", tutil.PROJECT_1) + hook = wh.WebHook.create(tutil.SQ, tutil.TEMP_KEY, "http://google.com", "Shhht012345678910", 
tutil.PROJECT_1) assert hook.name == tutil.TEMP_KEY assert hook.webhook_url == "http://google.com" - assert hook.secret == "Shhht" + assert hook.secret == "Shhht012345678910" assert hook.project == tutil.PROJECT_1 hook.refresh() hook.delete() - with pytest.raises(exceptions.ObjectNotFound): - hook.refresh() + if tutil.SQ.version() >= (10, 0, 0): + with pytest.raises(exceptions.ObjectNotFound): + hook.refresh()