diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index c5463ecdc..5ffd9b206 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -28,8 +28,12 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - if [ -f requirements-to-build.txt ]; then pip install -r requirements-to-build.txt; fi + python -m pip install poetry + poetry install + - name: Build package + run: | + poetry build + continue-on-error: false # Linting is done in the run_linters.sh script - name: Prep tests @@ -49,9 +53,10 @@ jobs: - name: Run linters working-directory: . run: | + python -m pip install ruff pylint flake8 chmod +x conf/run_linters.sh conf/run_linters.sh - #- name: Cache SonarQube packages + # - name: Cache SonarQube packages # uses: actions/cache@v4 # with: # path: ./.sonar diff --git a/.vscode/settings.json b/.vscode/settings.json index f1e39083f..cf07e1a93 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -6,5 +6,10 @@ "sonarlint.focusOnNewCode": true, "pylint.args": [ "[\"--rcfile=conf/pylintrc\"]" - ] + ], + "python.testing.pytestArgs": [ + "test" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true } \ No newline at end of file diff --git a/README.md b/README.md index 339d97724..595b65ceb 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ deletes tokens created since more than a certain number of days - [Release notes](https://github.com/okorach/sonar-tools/releases) # Requirements and Installation -- `sonar-tools` requires python 3.8 or higher +- `sonar-tools` requires python 3.9 or higher - Installation is based on [pip](https://pypi.org/project/pip/). - Online installation. 
- Run: `python3 -m pip install sonar-tools` (or `python3 -m pip upgrade sonar-tools`) diff --git a/cli/audit.py b/cli/audit.py index 07f6294c6..53de1cf08 100755 --- a/cli/audit.py +++ b/cli/audit.py @@ -18,15 +18,13 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" +"""Audits a SonarQube platform""" - Audits a SonarQube platform +from __future__ import annotations -""" -import sys import json import csv -from typing import TextIO +from typing import TextIO, Optional from threading import Thread from queue import Queue from requests import RequestException @@ -57,7 +55,7 @@ def _audit_sif(sysinfo: str, audit_settings: types.ConfigSettings) -> tuple[str, """Audits a SIF and return found problems""" log.info("Auditing SIF file '%s'", sysinfo) try: - with open(sysinfo, "r", encoding="utf-8") as f: + with open(sysinfo, encoding="utf-8") as f: sysinfo = json.loads(f.read()) except json.decoder.JSONDecodeError: log.critical("File %s does not seem to be a legit JSON file", sysinfo) @@ -73,7 +71,7 @@ def _audit_sif(sysinfo: str, audit_settings: types.ConfigSettings) -> tuple[str, def write_csv(queue: Queue[list[problem.Problem]], fd: TextIO, settings: types.ConfigSettings) -> None: - """Writes the CSV file of audit problems""" + """Thread callback to write audit problems in a CSV file""" server_id = settings.get("SERVER_ID", None) with_url = settings.get("WITH_URL", False) csvwriter = csv.writer(fd, delimiter=settings.get("CSV_DELIMITER", ",")) @@ -92,9 +90,7 @@ def write_csv(queue: Queue[list[problem.Problem]], fd: TextIO, settings: types.C def write_json(queue: Queue[list[problem.Problem]], fd: TextIO, settings: types.ConfigSettings) -> None: - """ - Thread to write problems in a JSON file - """ + """Thread callback to write problems in a JSON file""" server_id = settings.get("SERVER_ID", None) with_url = settings.get("WITH_URL", False) comma = "" @@ -113,7 +109,7 @@ def 
write_json(queue: Queue[list[problem.Problem]], fd: TextIO, settings: types. def _audit_sq( - sq: platform.Platform, settings: types.ConfigSettings, what_to_audit: list[str] = None, key_list: types.KeyList = None + sq: platform.Platform, settings: types.ConfigSettings, what_to_audit: Optional[list[str]] = None, key_list: types.KeyList = None ) -> list[problem.Problem]: """Audits a SonarQube/Cloud platform""" everything = what_to_audit is None @@ -178,9 +174,8 @@ def __parser_args(desc: str) -> object: def __check_keys_exist(key_regexp: list[str], sq: platform.Platform, what: list[str]) -> None: """Checks if project keys exist""" - if key_regexp and "projects" in what: - if len(component_helper.get_components(sq, "projects", key_regexp)) == 0: - raise options.ArgumentsError(f"No projects found with key matching regexp '{key_regexp}'") + if key_regexp and "projects" in what and len(component_helper.get_components(sq, "projects", key_regexp)) == 0: + raise options.ArgumentsError(f"No projects found with key matching regexp '{key_regexp}'") def main() -> None: diff --git a/cli/config.py b/cli/config.py index 4f008080a..ba6499c45 100644 --- a/cli/config.py +++ b/cli/config.py @@ -19,8 +19,9 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # """ - Exports SonarQube platform configuration as JSON +Exports SonarQube platform configuration as JSON """ + from typing import TextIO from threading import Thread from queue import Queue diff --git a/cli/cust_measures.py b/cli/cust_measures.py index 09b00f47a..d5751c136 100644 --- a/cli/cust_measures.py +++ b/cli/cust_measures.py @@ -19,10 +19,10 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # """ - This script manipulates custom measures. You may: +This script manipulates custom measures. 
You may: - Update a custom measure value: - Usage: cust_measures.py -t -u -k -m --updateValue +Update a custom measure value: + Usage: cust_measures.py -t -u -k -m --updateValue """ import sys diff --git a/cli/findings_export.py b/cli/findings_export.py index 00f9df321..0d95e34ae 100755 --- a/cli/findings_export.py +++ b/cli/findings_export.py @@ -19,9 +19,9 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # """ - This script exports findings as CSV, JSON, or SARIF +This script exports findings as CSV, JSON, or SARIF - Usage: sonar-findings-export.py -t -u [] +Usage: sonar-findings-export.py -t -u [] """ diff --git a/cli/findings_sync.py b/cli/findings_sync.py index 6db37be93..f79b99aa2 100755 --- a/cli/findings_sync.py +++ b/cli/findings_sync.py @@ -19,13 +19,13 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # """ - This script propagates the manual issue changes (FP, WF, Change - of severity, of issue type, comments) from: - - One project to another (normally on different platforms but not necessarily). - The 2 platform don't need to be identical in version, edition or plugins - - One branch of a project to another branch of the same project (normally LLBs) +This script propagates the manual issue changes (FP, WF, Change +of severity, of issue type, comments) from: +- One project to another (normally on different platforms but not necessarily). + The 2 platform don't need to be identical in version, edition or plugins +- One branch of a project to another branch of the same project (normally LLBs) - Only issues with a 100% match are synchronized. When there's a doubt, nothing is done +Only issues with a 100% match are synchronized. 
When there's a doubt, nothing is done """ import datetime diff --git a/cli/housekeeper.py b/cli/housekeeper.py index ff50b6b20..d9566097c 100644 --- a/cli/housekeeper.py +++ b/cli/housekeeper.py @@ -20,12 +20,13 @@ # """ - Removes obsolete data from SonarQube platform - Currently: - - projects, branches, PR not analyzed since a given number of days - - Tokens not renewed since a given number of days +Removes obsolete data from SonarQube platform +Currently: +- projects, branches, PR not analyzed since a given number of days +- Tokens not renewed since a given number of days """ + import sys from requests import RequestException diff --git a/cli/loc.py b/cli/loc.py index 86b8a2eb2..0f886c22d 100644 --- a/cli/loc.py +++ b/cli/loc.py @@ -19,8 +19,9 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # """ - Exports LoC per projects +Exports LoC per projects """ + import sys import csv import datetime diff --git a/cli/measures_export.py b/cli/measures_export.py index bec516c90..1a9b01728 100755 --- a/cli/measures_export.py +++ b/cli/measures_export.py @@ -19,11 +19,12 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# """ - Exports some measures of all projects - - Either all measures (-m _all) - - Or the main measures (-m _main) - - Or a custom selection of measures (-m ) +Exports some measures of all projects +- Either all measures (-m _all) +- Or the main measures (-m _main) +- Or a custom selection of measures (-m ) """ + import sys import csv from requests import RequestException diff --git a/cli/options.py b/cli/options.py index 8d1f5fc33..e6b91eb70 100644 --- a/cli/options.py +++ b/cli/options.py @@ -19,7 +19,7 @@ # """ - Cmd line options +Cmd line options """ diff --git a/cli/projects_cli.py b/cli/projects_cli.py index ec4c3cb1b..5ab525861 100644 --- a/cli/projects_cli.py +++ b/cli/projects_cli.py @@ -20,11 +20,10 @@ # """ - Exports/Imports all projects of a SonarQube Server platform +Exports/Imports all projects of a SonarQube Server platform """ -import sys import json from requests import RequestException diff --git a/cli/projects_export.py b/cli/projects_export.py index 4543102ee..9799dbf6e 100755 --- a/cli/projects_export.py +++ b/cli/projects_export.py @@ -20,9 +20,10 @@ # """ - Exports all projects of a SonarQube platform +Exports all projects of a SonarQube platform """ + import sys from unittest.mock import patch diff --git a/cli/projects_import.py b/cli/projects_import.py index 1443ad403..e1045798c 100755 --- a/cli/projects_import.py +++ b/cli/projects_import.py @@ -20,9 +20,10 @@ # """ - Imports a list of projects to a SonarQube platform +Imports a list of projects to a SonarQube platform """ + import sys from unittest.mock import patch diff --git a/cli/rules_cli.py b/cli/rules_cli.py index b441445d5..c5fe9d79c 100755 --- a/cli/rules_cli.py +++ b/cli/rules_cli.py @@ -19,8 +19,9 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# """ - Exports rules +Exports rules """ + import sys import csv diff --git a/cli/sonar_tools.py b/cli/sonar_tools.py index dd6040963..e99976624 100755 --- a/cli/sonar_tools.py +++ b/cli/sonar_tools.py @@ -21,7 +21,7 @@ """Main entry point for sonar-tools""" -from sonar import version +from sonar import version, utilities, errcodes def main() -> None: @@ -48,6 +48,7 @@ def main() -> None: See tools built-in -h help and https://github.com/okorach/sonar-tools for more documentation """ ) + utilities.final_exit(errcodes.OK) if __name__ == "__main__": diff --git a/cli/support.py b/cli/support.py index 60c3585fd..dc33a621b 100755 --- a/cli/support.py +++ b/cli/support.py @@ -20,9 +20,10 @@ # """ - Audits a SUPPORT ticket SIF +Audits a SUPPORT ticket SIF """ + from http import HTTPStatus import sys import os diff --git a/conf/build.sh b/conf/build.sh index c5ee435e4..6f1065538 100755 --- a/conf/build.sh +++ b/conf/build.sh @@ -16,15 +16,16 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" -CONFDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -SONAR_TOOLS_RELEASE="$ROOTDIR/sonar/version.py" +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" build_docs=0 build_docker=0 -while [ $# -ne 0 ]; do - case $1 in +. 
"${CONF_DIR}/env.sh" + +while [[ $# -ne 0 ]]; do + case "${1}" in docs|doc) build_docs=1 ;; @@ -40,18 +41,17 @@ done echo "======= FORMATTING CODE =========" ruff format echo "======= BUILDING PACKAGE =========" -rm -rf "$ROOTDIR/build/lib/sonar" "$ROOTDIR/build/lib/cli" "$ROOTDIR"/build/scripts*/sonar-tools "$ROOTDIR"/dist/sonar_tools* +rm -rf "${ROOT_DIR}/build/lib/sonar" "${ROOT_DIR}/build/lib/cli" "${ROOT_DIR}"/build/scripts*/sonar-tools "${ROOT_DIR}"/dist/sonar_tools* # python -m build poetry build -if [ "$build_docs" == "1" ]; then +if [[ "${build_docs}" = "1" ]]; then echo "======= BUILDING DOCS =========" rm -rf doc/api/build sphinx-build -b html doc/api/source doc/api/build fi -if [ "$build_docker" == "1" ]; then +if [[ "${build_docker}" = "1" ]]; then echo "======= BUILDING DOCKER IMAGE WITH SNAPSHOT =========" - version=$(grep PACKAGE_VERSION "$SONAR_TOOLS_RELEASE" | cut -d "=" -f 2 | cut -d '"' -f 2) - docker build -t "olivierkorach/sonar-tools:$version-snapshot" -t olivierkorach/sonar-tools:latest -f "$CONFDIR/snapshot.Dockerfile" "$ROOTDIR" --load + docker build -t "olivierkorach/sonar-tools:${VERSION}-snapshot" -t olivierkorach/sonar-tools:latest -f "${CONF_DIR}/snapshot.Dockerfile" "${ROOT_DIR}" --load fi diff --git a/conf/build_tests.sh b/conf/build_tests.sh index 2bde286d1..7decfe20c 100755 --- a/conf/build_tests.sh +++ b/conf/build_tests.sh @@ -19,9 +19,9 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. 
&& pwd )" -cd "$ROOTDIR/test/unit" || exit 1 +cd "${ROOT_DIR}/test/unit" || exit 1 export GEN_LOC=test/gen @@ -30,23 +30,23 @@ echo "Generating edition / version specific tests" for target in lts latest cb 9 9-ce common do - echo "Generating tests for $target" - rm -rf "${ROOTDIR:?}/${GEN_LOC:?}/$target" - mkdir -p "${ROOTDIR:?}/${GEN_LOC:?}/$target" 2>/dev/null - if [ "$target" == "common" ]; then - b=$(basename "$f" .py) - cp conftest.py utilities.py credentials.py "$ROOTDIR/$GEN_LOC/$target" - cp test_common*.py "$ROOTDIR/$GEN_LOC/$target" + echo "Generating tests for ${target}" + rm -rf "${ROOT_DIR:?}/${GEN_LOC:?}/${target}" + mkdir -p "${ROOT_DIR:?}/${GEN_LOC:?}/${target}" 2>/dev/null + if [[ "${target}" = "common" ]]; then + b=$(basename "${f}" .py) + cp conftest.py utilities.py credentials.py "${ROOT_DIR}/${GEN_LOC}/${target}" + cp test_common*.py "${ROOT_DIR}/${GEN_LOC}/${target}" else for f in *.py do - b=$(basename "$f" .py) - cp "$f" "$ROOTDIR/$GEN_LOC/$target/${b}_${target}.py" + b=$(basename "${f}" .py) + cp "${f}" "${ROOT_DIR}/${GEN_LOC}/${target}/${b}_${target}.py" done - cp "credentials-$target.py" "$ROOTDIR/$GEN_LOC/$target/credentials.py" - mv "$ROOTDIR/$GEN_LOC/$target/conftest_${target}.py" "$ROOTDIR/$GEN_LOC/$target/conftest.py" - mv "$ROOTDIR/$GEN_LOC/$target/utilities_${target}.py" "$ROOTDIR/$GEN_LOC/$target/utilities.py" - rm "$ROOTDIR/$GEN_LOC/$target/"test_common*.py + cp "credentials-${target}.py" "${ROOT_DIR}/${GEN_LOC}/${target}/credentials.py" + mv "${ROOT_DIR}/${GEN_LOC}/${target}/conftest_${target}.py" "${ROOT_DIR}/${GEN_LOC}/${target}/conftest.py" + mv "${ROOT_DIR}/${GEN_LOC}/${target}/utilities_${target}.py" "${ROOT_DIR}/${GEN_LOC}/${target}/utilities.py" + rm "${ROOT_DIR}/${GEN_LOC}/${target}/"test_common*.py fi done diff --git a/conf/deploy.sh b/conf/deploy.sh index 4435d25f1..c36adf439 100755 --- a/conf/deploy.sh +++ b/conf/deploy.sh @@ -19,14 +19,14 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" -CONFDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" deps=0 -"$CONFDIR"/build.sh "$@" +"${CONF_DIR}"/build.sh "$@" -while [ $# -ne 0 ]; do - case $1 in +while [[ $# -ne 0 ]]; do + case "${1}" in deps) deps=1 ;; @@ -37,9 +37,9 @@ while [ $# -ne 0 ]; do done # Deploy locally for tests -if [ "$deps" == "1" ]; then +if [[ "${deps}" = "1" ]]; then pipopts="--upgrade" else pipopts="--no-deps" fi -pip install "$pipopts" --force-reinstall "$ROOTDIR"/dist/sonar_tools-*-py3-*.whl +pip install "${pipopts}" --force-reinstall "${ROOT_DIR}"/dist/sonar_tools-*-py3-*.whl diff --git a/conf/env.sh b/conf/env.sh new file mode 100644 index 000000000..44ee3bbcb --- /dev/null +++ b/conf/env.sh @@ -0,0 +1,30 @@ +#!/bin/bash +# +# sonar-tools +# Copyright (C) 2025 Olivier Korach +# mailto:olivier.korach AT gmail DOT com +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# + +VERSION=$(grep PACKAGE_VERSION "${ROOT_DIR}/sonar/version.py" | cut -d "=" -f 2 | cut -d '"' -f 2) +BUILD_DIR="${ROOT_DIR}/build" + +PYLINT_REPORT="${BUILD_DIR}/pylint-report.out" +# banditReport="${BUILD_DIR}/bandit-report.json" +FLAKE8_REPORT="${BUILD_DIR}/flake8-report.out" +SHELLCHECK_REPORT="${BUILD_DIR}/external-issues-shellcheck.json" +TRIVY_REPORT="${BUILD_DIR}/external-issues-trivy.json" +RUFF_REPORT="${BUILD_DIR}/external-issues-ruff.json" \ No newline at end of file diff --git a/conf/prep_all_tests.sh b/conf/prep_all_tests.sh index 7a84b01a3..3f4712b6e 100755 --- a/conf/prep_all_tests.sh +++ b/conf/prep_all_tests.sh @@ -21,32 +21,44 @@ SYNC_PROJECT_KEY="TESTSYNC" -# Deletes and recreates a fresh $SYNC_PROJECT_KEY project in SonarQube -curl -X POST -u "$SONAR_TOKEN_LATEST_ADMIN_USER:" "http://localhost:20010/api/projects/delete?project=$SYNC_PROJECT_KEY" -conf/scan.sh -Dsonar.host.url=http://localhost:20010 -Dsonar.projectKey=$SYNC_PROJECT_KEY -Dsonar.projectName=$SYNC_PROJECT_KEY -Dsonar.token="$SONAR_TOKEN_LATEST_ADMIN_ANALYSIS" +function create_fresh_project { + key="${1}" + url="${2}" + usertoken="${3}" + token="${4}" + shift 3 + opts=("$@") + opt_org="" + if [[ "${url}" = "https://sonarcloud.io" ]]; then + opt_org="-Dsonar.organization=okorach" + fi + opt_token="-Dsonar.token=${token}" + if [[ "${url}" = "${SONAR_HOST_URL_9}" ]]; then + opt_token="-Dsonar.login=${token}" + fi + curl -X POST -u "${usertoken}:" "${url}/api/projects/delete?project=${key}" + conf/run_scanner.sh "${opts[@]}" -Dsonar.projectKey="${key}" -Dsonar.projectName="${key}" -Dsonar.host.url="${url}" "${opt_token}" "${opt_org}" + conf/run_scanner.sh "${opts[@]}" -Dsonar.projectKey="${key}" -Dsonar.projectName="${key}" -Dsonar.host.url="${url}" "${opt_token}" "${opt_org}" -Dsonar.branch.name=develop + conf/run_scanner.sh "${opts[@]}" -Dsonar.projectKey="${key}" -Dsonar.projectName="${key}" -Dsonar.host.url="${url}" "${opt_token}" "${opt_org}" -Dsonar.branch.name=release-3.x + 
return 0 +} -curl -X POST -u "$SONAR_TOKEN_LATEST_ADMIN_USER:" "http://localhost:10000/api/projects/delete?project=$SYNC_PROJECT_KEY" -sonar-scanner -Dsonar.host.url=http://localhost:10000 -Dsonar.projectKey=$SYNC_PROJECT_KEY -Dsonar.projectName=$SYNC_PROJECT_KEY -Dsonar.login="$SONAR_TOKEN_LATEST_ADMIN_ANALYSIS" -Dsonar.token="$SONAR_TOKEN_LATEST_ADMIN_ANALYSIS" -curl -X POST -u "$SONAR_TOKEN_LATEST_ADMIN_USER:" "http://localhost:20010/api/projects/delete?project=$SYNC_PROJECT_KEY" -sonar-scanner -Dsonar.host.url=http://localhost:20010 -Dsonar.projectKey=$SYNC_PROJECT_KEY -Dsonar.projectName=$SYNC_PROJECT_KEY -Dsonar.login="$SONAR_TOKEN_LATEST_ADMIN_ANALYSIS" -Dsonar.token="$SONAR_TOKEN_LATEST_ADMIN_ANALYSIS" -curl -X POST -u "$SONAR_TOKEN_LATEST_ADMIN_USER:" "http://localhost:7000/api/projects/delete?project=$SYNC_PROJECT_KEY" -sonar-scanner -Dsonar.host.url=http://localhost:7000 -Dsonar.projectKey=$SYNC_PROJECT_KEY -Dsonar.projectName=$SYNC_PROJECT_KEY -Dsonar.login="$SONAR_TOKEN_LATEST_ADMIN_ANALYSIS" -Dsonar.token="$SONAR_TOKEN_LATEST_ADMIN_ANALYSIS" -curl -X POST -u "$SONAR_TOKEN_9_ADMIN_USER:" "http://localhost:9000/api/projects/delete?project=$SYNC_PROJECT_KEY" -sonar-scanner -Dsonar.host.url=http://localhost:9000 -Dsonar.projectKey=$SYNC_PROJECT_KEY -Dsonar.projectName=$SYNC_PROJECT_KEY -Dsonar.login="$SONAR_TOKEN_9_ADMIN_ANALYSIS" -Dsonar.token="$SONAR_TOKEN_9_ADMIN_ANALYSIS" +conf/run_linters.sh -curl -X POST -u "$SONAR_TOKEN_SONARCLOUD:" "https://sonarcloud.io/api/projects/delete?project=$SYNC_PROJECT_KEY" -sonar-scanner -Dsonar.host.url=https://sonarcloud.io -Dsonar.projectKey=$SYNC_PROJECT_KEY -Dsonar.projectName=$SYNC_PROJECT_KEY -Dsonar.organization=okorach -Dsonar.login="$SONAR_TOKEN_SONARCLOUD" -Dsonar.token="$SONAR_TOKEN_SONARCLOUD" - -sonar-scanner -Dsonar.host.url=http://localhost:10000 -Dsonar.pullrequest.key=5 -Dsonar.pullrequest.branch=feature/5 -sonar-scanner -Dsonar.host.url=http://localhost:10000 -Dsonar.pullrequest.key=7 
-Dsonar.pullrequest.branch=feature/7 - -sonar-scanner -Dsonar.host.url=http://localhost:8000 -Dsonar.pullrequest.key=5 -Dsonar.pullrequest.branch=feature/5 -Dsonar.login="$SONAR_TOKEN_LTS_ADMIN_ANALYSIS" -sonar-scanner -Dsonar.host.url=http://localhost:8000 -Dsonar.pullrequest.key=7 -Dsonar.pullrequest.branch=feature/7 -Dsonar.login="$SONAR_TOKEN_LTS_ADMIN_ANALYSIS" - -sonar-scanner -Dsonar.host.url=http://localhost:7000 -Dsonar.login="$SONAR_TOKEN_CB_ADMIN_ANALYSIS" +create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_TEST:?}" "${SONAR_TOKEN_TEST_ADMIN_USER}" "${SONAR_TOKEN_TEST_ADMIN_ANALYSIS}" +create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_LATEST:?}" "${SONAR_TOKEN_LATEST_ADMIN_USER}" "${SONAR_TOKEN_LATEST_ADMIN_ANALYSIS}" +create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_CB:?}" "${SONAR_TOKEN_CB_ADMIN_USER}" "${SONAR_TOKEN_CB_ADMIN_ANALYSIS}" +create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_9:?}" "${SONAR_TOKEN_9_ADMIN_USER}" "${SONAR_TOKEN_9_ADMIN_ANALYSIS}" +create_fresh_project "${SYNC_PROJECT_KEY}" "https://sonarcloud.io" "${SONAR_TOKEN_SONARCLOUD}" "${SONAR_TOKEN_SONARCLOUD}" +create_fresh_project "${SYNC_PROJECT_KEY}" "${SONAR_HOST_URL_CB:?}" "${SONAR_TOKEN_CB_ADMIN_USER}" "${SONAR_TOKEN_CB_ADMIN_ANALYSIS}" +for pr in 5 7; do + sonar-scanner -Dsonar.host.url="${SONAR_HOST_URL_LATEST:?}" -Dsonar.pullrequest.key="${pr}" -Dsonar.pullrequest.branch="feature/${pr}" -Dsonar.token="${SONAR_TOKEN_LATEST_ADMIN_ANALYSIS}" + sonar-scanner -Dsonar.host.url="${SONAR_HOST_URL_LTS:?}" -Dsonar.pullrequest.key="${pr}" -Dsonar.pullrequest.branch="feature/${pr}" -Dsonar.token="${SONAR_TOKEN_LTS_ADMIN_ANALYSIS}" +done # Format for 10.x and 9.x is different, file was generated for 10.x, so removing for 9.9 -rm build/external-issues* -sonar-scanner -Dsonar.host.url=http://localhost:9000 -Dsonar.pullrequest.key=5 -Dsonar.pullrequest.branch=feature/5 -Dsonar.login="$SONAR_TOKEN_9_ADMIN_ANALYSIS" -sonar-scanner 
-Dsonar.host.url=http://localhost:9000 -Dsonar.pullrequest.key=7 -Dsonar.pullrequest.branch=feature/7 -Dsonar.login="$SONAR_TOKEN_9_ADMIN_ANALYSIS" +rm build/external-issues* +for pr in 5 7; do + sonar-scanner -Dsonar.host.url="${SONAR_HOST_URL_9:?}" -Dsonar.pullrequest.key="${pr}" -Dsonar.pullrequest.branch="feature/${pr}" -Dsonar.login="${SONAR_TOKEN_9_ADMIN_ANALYSIS}" +done \ No newline at end of file diff --git a/conf/pylintrc b/conf/pylintrc index 1ef335f24..55b7d4baa 100644 --- a/conf/pylintrc +++ b/conf/pylintrc @@ -91,7 +91,9 @@ disable=raw-checker-failed, R0205, W0107, R1721, - R1711 + R1711, + R0913 + # Enable the message, report, category or checker with the given id(s). You can diff --git a/conf/release.sh b/conf/release.sh index 940f84879..972c802d1 100755 --- a/conf/release.sh +++ b/conf/release.sh @@ -4,7 +4,7 @@ # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either -# version 3 of the License, or (at your option) any later version. +# VERSION 3 of the License, or (at your option) any later VERSION. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of @@ -16,40 +16,39 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" -CONFDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -"$CONFDIR"/build.sh +. 
"${CONF_DIR}"/env.sh -SONAR_TOOLS_RELEASE="$ROOTDIR/sonar/version.py" -DOCKERFILE_RELEASE="$CONFDIR/release.Dockerfile" +"${CONF_DIR}"/build.sh -version=$(grep PACKAGE_VERSION "$SONAR_TOOLS_RELEASE" | cut -d "=" -f 2 | cut -d '"' -f 2) +DOCKERFILE_RELEASE="${CONF_DIR}/release.Dockerfile" -docker_version=$(grep 'pip install sonar-tools==' "$DOCKERFILE_RELEASE" | sed -E 's/.*sonar-tools==([0-9\.]+).*/\1/') +docker_VERSION=$(grep 'pip install sonar-tools==' "${DOCKERFILE_RELEASE}" | sed -E 's/.*sonar-tools==([0-9\.]+).*/\1/') -if [ "$version" != "$docker_version" ]; then - echo "Docker version and pypi version are different ($docker_version vs $version), release aborted" +if [[ "${VERSION}" != "${docker_VERSION}" ]]; then + echo "Docker VERSION and pypi VERSION are different (${docker_VERSION} vs ${VERSION}), release aborted" exit 1 fi echo "Confirm release [y/n] ?" read -r confirm -if [ "$confirm" = "y" ]; then - version=$(grep PACKAGE_VERSION "$ROOTDIR"/sonar/version.py | cut -d "=" -f 2 | sed -e "s/[\'\" ]//g" -e "s/^ +//" -e "s/ +$//") +if [[ "${confirm}" = "y" ]]; then + VERSION=$(grep PACKAGE_VERSION "${ROOT_DIR}"/sonar/VERSION.py | cut -d "=" -f 2 | sed -e "s/[\'\" ]//g" -e "s/^ +//" -e "s/ +$//") echo "Releasing on pypi.org" - python3 -m twine upload "$ROOTDIR/dist/sonar_tools-$version-py3-none-any.whl" + python3 -m twine upload "${ROOT_DIR}/dist/sonar_tools-${VERSION}-py3-none-any.whl" echo -n "Waiting pypi release to be effective" - while [ "$(get_pypi_latest_version sonar-tools)" != "$version" ]; do + while [[ "$(get_pypi_latest_VERSION sonar-tools)" != "${VERSION}" ]]; do sleep 10 echo -n "." 
done echo " done" echo "Releasing on dockerhub" - docker buildx build --push --platform linux/amd64,linux/arm64 -t "olivierkorach/sonar-tools:$version" -t olivierkorach/sonar-tools:latest -f "$CONFDIR/release.Dockerfile" "$ROOTDIR" - cd "$ROOTDIR" && docker pushrm olivierkorach/sonar-tools + docker buildx build --push --platform linux/amd64,linux/arm64 -t "olivierkorach/sonar-tools:${VERSION}" -t olivierkorach/sonar-tools:latest -f "${CONF_DIR}/release.Dockerfile" "${ROOT_DIR}" + cd "${ROOT_DIR}" && docker pushrm olivierkorach/sonar-tools echo "Running scan" - "$CONFDIR/scan.sh" -test + "${CONF_DIR}/run_all.sh" -test fi \ No newline at end of file diff --git a/conf/ruff2sonar.py b/conf/ruff2sonar.py index b3c5195b1..faa3458aa 100755 --- a/conf/ruff2sonar.py +++ b/conf/ruff2sonar.py @@ -18,11 +18,8 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" +"""Converts Ruff report format to Sonar external issues format""" - Converts Ruff report format to Sonar external issues format - -""" import sys import json import re @@ -35,59 +32,73 @@ def main() -> None: """Main script entry point""" + v1 = len(sys.argv) > 1 and sys.argv[1] == "v1" rules_dict = {} - issue_list = {} + issue_list = [] lines = sys.stdin.read().splitlines() i = 0 + sonar_issue = None + issue_range = {} nblines = len(lines) + end_line = None while i < nblines: line = lines[i] - i += 1 # Search for pattern like "sonar/projects.py:196:13: B904 Within an `except` clause, raise exceptions" - if not (m := re.match(r"^([^:]+):(\d+):(\d+): ([A-Z0-9]+)( \[\*\])? 
(.+)$", line)): - continue - file_path = m.group(1) - line_no = int(m.group(2)) - start_col = int(m.group(3)) - 1 - end_col = start_col + 1 - rule_id = m.group(4) - message = m.group(6) - i += 1 - - # Search for " | ^^^" pattern" - while i < nblines and not re.match(r"^$", lines[i]): - if m := re.match(r"\s*\|\s(\s*)(\^+)", lines[i]): - end_col = start_col + len(m.group(2)) - i += 1 - - sonar_issue = { - "ruleId": f"{TOOLNAME}:{rule_id}", - "effortMinutes": 5, - "primaryLocation": { - "message": message, - "filePath": file_path, - "textRange": { - "startLine": line_no, - "endLine": line_no, - "startColumn": start_col, - "endColumn": end_col, + if m := re.match(r"^([^:]+):(\d+):(\d+): ([A-Z0-9]+)( \[\*\])? (.+)$", line): + if sonar_issue is not None: + issue_list.append(sonar_issue) + end_line = None + file_path = m.group(1) + issue_range = { + "startLine": int(m.group(2)), + "endLine": int(m.group(2)), + "startColumn": int(m.group(3)) - 1, + "endColumn": int(m.group(3)), + } + rule_id = m.group(4) + message = m.group(6) + sonar_issue = { + "ruleId": f"{TOOLNAME}:{rule_id}", + "effortMinutes": 5, + "primaryLocation": { + "message": m.group(6), + "filePath": file_path, + "textRange": issue_range, }, - }, - } - - issue_list[f"{rule_id} - {message}"] = sonar_issue - rules_dict[f"{TOOLNAME}:{rule_id}"] = { - "id": f"{TOOLNAME}:{rule_id}", - "name": f"{TOOLNAME}:{rule_id}", - "description": message, - "engineId": TOOLNAME, - "type": "CODE_SMELL", - "severity": "MAJOR", - "cleanCodeAttribute": "LOGICAL", - "impacts": [{"softwareQuality": "MAINTAINABILITY", "severity": "MEDIUM"}], - } + } + if v1: + sonar_issue["engineId"] = TOOLNAME + sonar_issue["severity"] = "MAJOR" + sonar_issue["type"] = "CODE_SMELL" + rules_dict[f"{TOOLNAME}:{rule_id}"] = { + "id": f"{TOOLNAME}:{rule_id}", + "name": f"{TOOLNAME}:{rule_id}", + "description": message, + "engineId": TOOLNAME, + "type": "CODE_SMELL", + "severity": "MAJOR", + "cleanCodeAttribute": "LOGICAL", + "impacts": 
[{"softwareQuality": "MAINTAINABILITY", "severity": "MEDIUM"}], + } + elif m := re.match(r"\s+\|\s\|(_+)\^ [A-Z0-9]+", lines[i]): + issue_range["endLine"] = end_line or issue_range["startLine"] + end_line = None + if rule_id != "I001": + issue_range["endColumn"] = len(m.group(1)) + else: + issue_range["endLine"] -= 1 + issue_range.pop("startColumn") + issue_range.pop("endColumn") + end_line = None + elif m := re.match(r"\s*(\d+)\s\|\s\|.*$", lines[i]): + end_line = int(m.group(1)) + i += 1 - external_issues = {"rules": list(rules_dict.values()), "issues": list(issue_list.values())} + if len(issue_list) == 0: + return + external_issues = {"rules": list(rules_dict.values()), "issues": issue_list} + if v1: + external_issues.pop("rules") print(json.dumps(external_issues, indent=3, separators=(",", ": "))) diff --git a/conf/run_all.sh b/conf/run_all.sh new file mode 100755 index 000000000..8711b8a4c --- /dev/null +++ b/conf/run_all.sh @@ -0,0 +1,62 @@ +#!/bin/bash +# +# sonar-tools +# Copyright (C) 2019-2025 Olivier Korach +# mailto:olivier.korach AT gmail DOT com +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# + +# ME="$( basename "${BASH_SOURCE[0]}" )" +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. 
&& pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +. "${CONF_DIR}/env.sh" + +dolint="true" +dotest="false" +scanOpts=() + +while [[ $# -ne 0 ]] +do + case "${1}" in + -nolint) + dolint="false" + ;; + -test) + dotest="true" + ;; + -9) + external_format="v1" + ;; + -local) + localbuild="true" + ;; + *) + scanOpts=("${scanOpts[@]}" "${1}") + ;; + esac + shift +done + +if [[ "${dolint}" != "false" ]]; then + "${CONF_DIR}"/run_linters.sh "${external_format}" "${localbuild}" +fi + +if [[ "${dotest}" = "true" ]]; then + "${CONF_DIR}"/run_tests.sh +fi + +"${CONF_DIR}"/run_scanner.sh "${scanOpts[@]}" diff --git a/conf/run_linters.sh b/conf/run_linters.sh index d8ff79ae0..b134bb5b1 100755 --- a/conf/run_linters.sh +++ b/conf/run_linters.sh @@ -20,58 +20,73 @@ # ME="$( basename "${BASH_SOURCE[0]}" )" -ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" -CONFDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -localbuild="$1" +external_format="${1}" +localbuild="${2}" +linters_to_run="${3:-ruff,pylint,flake8,trivy,checkov,shellcheck}" -buildDir="$ROOTDIR/build" -pylintReport="$buildDir/pylint-report.out" -# banditReport="$buildDir/bandit-report.json" -flake8Report="$buildDir/flake8-report.out" -shellcheckReport="$buildDir/external-issues-shellcheck.json" -trivyReport="$buildDir/external-issues-trivy.json" -ruffReport="$buildDir/external-issues-ruff.json" -[ ! -d "$buildDir" ] && mkdir "$buildDir" -# rm -rf -- ${buildDir:?"."}/* .coverage */__pycache__ */*.pyc # mediatools/__pycache__ tests/__pycache__ - -echo "===> Running ruff" -rm -f "$ruffReport" -ruff check . | tee "$buildDir/ruff-report.txt" | "$CONFDIR"/ruff2sonar.py >"$ruffReport" -re=$? -if [ "$re" == "32" ]; then - >&2 echo "ERROR: pylint execution failed, errcode $re, aborting..." 
- exit $re +if [[ "${localbuild}" = "" ]]; then + localbuild="true" + if [[ "${CI}" != "" ]]; then + localbuild="false" + fi fi -cat "$buildDir/ruff-report.txt" -echo "===> Running pylint" -rm -f "$pylintReport" -pylint --rcfile "$CONFDIR"/pylintrc "$ROOTDIR"/*.py "$ROOTDIR"/*/*.py -r n --msg-template="{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}" | tee "$pylintReport" -re=$? -if [ "$re" == "32" ]; then - >&2 echo "ERROR: pylint execution failed, errcode $re, aborting..." - exit $re -fi +. "${CONF_DIR}/env.sh" + +[[ ! -d "${BUILD_DIR}" ]] && mkdir "${BUILD_DIR}" +# rm -rf -- ${BUILD_DIR:?"."}/* .coverage */__pycache__ */*.pyc # mediatools/__pycache__ tests/__pycache__ -echo "===> Running flake8" -rm -f "$flake8Report" -# See .flake8 file for settings -flake8 --config "$CONFIG/.flake8" "$ROOTDIR" | tee "$flake8Report" +if [[ "${linters_to_run}" == *"ruff"* ]]; then + echo "===> Running ruff" + rm -f "${RUFF_REPORT}" + ruff check . | tee "${BUILD_DIR}/ruff-report.txt" | "${CONF_DIR}"/ruff2sonar.py "${external_format}" >"${RUFF_REPORT}" + re=$? + if [[ "${re}" = "32" ]]; then + >&2 echo "ERROR: pylint execution failed, errcode ${re}, aborting..." + exit "${re}" + fi + cat "${BUILD_DIR}/ruff-report.txt" +fi -if [ "$localbuild" = "true" ]; then - echo "===> Running shellcheck" - shellcheck "$ROOTDIR"/*.sh "$ROOTDIR"/*/*.sh -s bash -f json | tee "$buildDir/shellcheck-report.txt" | "$CONFDIR"/shellcheck2sonar.py >"$shellcheckReport" - [ ! -s "$shellcheckReport" ] && rm -f "$shellcheckReport" - cat "$buildDir/shellcheck-report.txt" +if [[ "${linters_to_run}" == *"pylint"* ]]; then + echo "===> Running pylint" + rm -f "${PYLINT_REPORT}" + pylint --rcfile "${CONF_DIR}"/pylintrc "${ROOT_DIR}"/*.py "${ROOT_DIR}"/*/*.py -r n --msg-template="{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}" | tee "${PYLINT_REPORT}" + re=$? + if [[ "${re}" = "32" ]]; then + >&2 echo "ERROR: pylint execution failed, errcode ${re}, aborting..." 
+ exit "${re}" + fi +fi - echo "===> Running checkov" - checkov -d . --framework dockerfile -o sarif --output-file-path "$buildDir" +if [[ "${linters_to_run}" == *"flake8"* ]]; then + echo "===> Running flake8" + rm -f "${FLAKE8_REPORT}" + # See .flake8 file for settings + flake8 --config "${CONF_DIR}/.flake8" "${ROOT_DIR}" | tee "${FLAKE8_REPORT}" +fi - echo "===> Running trivy" - "$CONFDIR"/build.sh docker - trivy image -f json -o "$buildDir"/trivy_results.json olivierkorach/sonar-tools:latest - cat "$buildDir"/trivy_results.json - python3 "$CONFDIR"/trivy2sonar.py < "$buildDir"/trivy_results.json > "$trivyReport" - [ ! -s "$trivyReport" ] && rm -f "$trivyReport" +if [[ "${localbuild}" = "true" ]]; then + if [[ "${linters_to_run}" == *"shellcheck"* ]]; then + echo "===> Running shellcheck" + shellcheck "$(find "${ROOT_DIR}" . -name '*.sh')" \ + -s bash -f json | jq | tee "${BUILD_DIR}/shellcheck-report.json" | "${CONF_DIR}"/shellcheck2sonar.py "${external_format}" > "${SHELLCHECK_REPORT}" + [[ ! -s "${SHELLCHECK_REPORT}" ]] && rm -f "${SHELLCHECK_REPORT}" + cat "${BUILD_DIR}/shellcheck-report.json" + fi + if [[ "${linters_to_run}" == *"checkov"* ]]; then + echo "===> Running checkov" + checkov -d . --framework dockerfile -o sarif --output-file-path "${BUILD_DIR}" + fi + if [[ "${linters_to_run}" == *"trivy"* ]]; then + echo "===> Running trivy" + "${CONF_DIR}"/build.sh docker + trivy image -f json -o "${BUILD_DIR}"/trivy_results.json olivierkorach/sonar-tools:latest + cat "${BUILD_DIR}"/trivy_results.json + python3 "${CONF_DIR}"/trivy2sonar.py "${external_format}" < "${BUILD_DIR}"/trivy_results.json > "${TRIVY_REPORT}" + [[ ! 
-s "${TRIVY_REPORT}" ]] && rm -f "${TRIVY_REPORT}" + fi fi \ No newline at end of file diff --git a/conf/run_scanner.sh b/conf/run_scanner.sh new file mode 100755 index 000000000..4f4259147 --- /dev/null +++ b/conf/run_scanner.sh @@ -0,0 +1,81 @@ +#!/bin/bash +# +# sonar-tools +# Copyright (C) 2019-2025 Olivier Korach +# mailto:olivier.korach AT gmail DOT com +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# VERSION 3 of the License, or (at your option) any later VERSION. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# + +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +. 
"${CONF_DIR}/env.sh" + +auth="" +if [[ "${SONAR_HOST_URL}" = "${SONAR_HOST_URL_9}" ]]; then + auth="-Dsonar.login=${SONAR_TOKEN}" +fi + +while [[ $# -ne 0 ]] +do + case "${1}" in + -Dsonar.host.url=*) + scanOpts=("${scanOpts[@]}" "${1}") + url=$(echo "${1}" | cut -d = -f 2) + if [[ "${url}" = "${SONAR_HOST_URL_9}" ]]; then + auth="-Dsonar.login=${SONAR_TOKEN}" + fi + ;; + *) + scanOpts=("${scanOpts[@]}" "${1}") + ;; + esac + shift +done + +cmd="sonar-scanner -Dsonar.projectVersion=${VERSION} \ + -Dsonar.python.flake8.reportPaths=${FLAKE8_REPORT} \ + -Dsonar.python.pylint.reportPaths=${PYLINT_REPORT} \ + -Dsonar.token=${SONAR_TOKEN} ${auth}\ + "${scanOpts[*]}"" + +relativeDir=$(basename "${BUILD_DIR}") + if ls "${BUILD_DIR}"/coverage*.xml >/dev/null 2>&1; then + cmd="${cmd} -Dsonar.python.coverage.reportPaths=${relativeDir}/coverage*.xml" +else + echo "===> NO COVERAGE REPORT" +fi + +if ls "${BUILD_DIR}"/xunit-results*.xml >/dev/null 2>&1; then + cmd="${cmd} -Dsonar.python.xunit.reportPath=${relativeDir}/xunit-results*.xml" +else + echo "===> NO UNIT TESTS REPORT" + cmd="${cmd} -Dsonar.python.xunit.reportPath=" +fi + +if ls "${BUILD_DIR}"/external-issues*.json >/dev/null 2>&1; then + files=$(ls "${BUILD_DIR}"/external-issues*.json | tr '\n' ' ' | sed -E -e 's/ +$//' -e 's/ +/,/g') + echo "EXTERNAL ISSUES FILES = ${files}" + cmd="${cmd} -Dsonar.externalIssuesReportPaths=${files}" +else + echo "===> NO EXTERNAL ISSUES" +fi + +echo +echo "Running: ${cmd}" | sed "s/${SONAR_TOKEN}//g" +echo + +${cmd} diff --git a/conf/run_tests.sh b/conf/run_tests.sh index d961ea5a6..1f50fe679 100755 --- a/conf/run_tests.sh +++ b/conf/run_tests.sh @@ -20,27 +20,34 @@ # # ME="$( basename "${BASH_SOURCE[0]}" )" -ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" -CONFDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -buildDir="$ROOTDIR/build" +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. 
&& pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -[ ! -d "$buildDir" ] && mkdir "$buildDir" +. "${CONF_DIR}/env.sh" + +SYNC_PROJECT_KEY="TESTSYNC" + +[[ ! -d "${BUILD_DIR}" ]] && mkdir "${BUILD_DIR}" echo "Running tests" -. "$CONFDIR/build_tests.sh" +. "${CONF_DIR}/build_tests.sh" -cd "$ROOTDIR" || exit 1 +cd "${ROOT_DIR}" || exit 1 sonar start -i test for target in latest cb 9 common do - if [ "$target" != "common" ]; then - sonar start -i $target && sleep 30 + if [[ "${target}" != "common" ]]; then + sonar start -i "${target}" && sleep 30 fi - if [ -d "$ROOTDIR/$GEN_LOC/$target/" ]; then - poetry run coverage run --branch --source="$ROOTDIR" -m pytest "$ROOTDIR/$GEN_LOC/$target/" --junit-xml="$buildDir/xunit-results-$target.xml" - poetry run coverage xml -o "$buildDir/coverage-$target.xml" + if [[ -d "${ROOT_DIR}/${GEN_LOC}/${target}/" ]]; then + # Recreate a fresh TESTSYNC project for sync tests + curl -X POST -u "${SONAR_TOKEN_TEST_ADMIN_USER}:" "${SONAR_HOST_URL_TEST}/api/projects/delete?project=${SYNC_PROJECT_KEY}" + conf/run_scanner.sh -Dsonar.host.url="${SONAR_HOST_URL_TEST}" -Dsonar.projectKey="${SYNC_PROJECT_KEY}" -Dsonar.projectName="${SYNC_PROJECT_KEY}" -Dsonar.token="${SONAR_TOKEN_TEST_ADMIN_ANALYSIS}" + # Run tests + poetry run coverage run --branch --source="${ROOT_DIR}" -m pytest "${ROOT_DIR}/${GEN_LOC}/${target}/" --junit-xml="${BUILD_DIR}/xunit-results-${target}.xml" + poetry run coverage xml -o "${BUILD_DIR}/coverage-${target}.xml" fi done diff --git a/conf/scan.sh b/conf/scan.sh deleted file mode 100755 index 9d9d0c944..000000000 --- a/conf/scan.sh +++ /dev/null @@ -1,110 +0,0 @@ -#!/bin/bash -# -# sonar-tools -# Copyright (C) 2019-2025 Olivier Korach -# mailto:olivier.korach AT gmail DOT com -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 3 of the License, or (at your 
option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# - -# ME="$( basename "${BASH_SOURCE[0]}" )" -ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" -CONFDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -dolint="true" -dotest="false" -if [ "$CI" == "" ]; then - localbuild="true" -else - localbuild="false" -fi - -scanOpts=() - -while [ $# -ne 0 ] -do - case "$1" in - -nolint) - dolint="false" - ;; - -test) - dotest="true" - ;; - -local) - localbuild="true" - ;; - *) - scanOpts=("${scanOpts[@]}" "$1") - ;; - esac - shift -done - -buildDir="build" -pylintReport="$buildDir/pylint-report.out" -flake8Report="$buildDir/flake8-report.out" - -[ ! 
-d "$buildDir" ] && mkdir "$buildDir" -# rm -rf -- ${buildDir:?"."}/* .coverage */__pycache__ */*.pyc # mediatools/__pycache__ testpytest/__pycache__ testunittest/__pycache__ - - -if [ "$dolint" != "false" ]; then - "$CONFDIR"/run_linters.sh "$localbuild" -fi - -if [ "$dotest" == "true" ]; then - "$CONFDIR"/run_tests.sh -fi - -version=$(grep PACKAGE_VERSION "$ROOTDIR/sonar/version.py" | cut -d "=" -f 2 | sed -e "s/[\'\" ]//g" -e "s/^ +//" -e "s/ +$//") - - -cmd="sonar-scanner -Dsonar.projectVersion=$version \ - -Dsonar.python.flake8.reportPaths=$flake8Report \ - -Dsonar.python.pylint.reportPaths=$pylintReport \ - -Dsonar.token=$SONAR_TOKEN \ - "${scanOpts[*]}"" - -if [ "$SONAR_HOST_URL" == "$SONAR_HOST_URL_9" ]; then - cmd="$cmd -Dsonar.login=$SONAR_TOKEN" -fi - -if ls $buildDir/coverage*.xml >/dev/null 2>&1; then - cmd="$cmd -Dsonar.python.coverage.reportPaths=$buildDir/coverage*.xml" -else - echo "===> NO COVERAGE REPORT" -fi - -if ls $buildDir/xunit-results*.xml >/dev/null 2>&1; then - cmd="$cmd -Dsonar.python.xunit.reportPath=$buildDir/xunit-results*.xml" -else - echo "===> NO UNIT TESTS REPORT" - cmd="$cmd -Dsonar.python.xunit.reportPath=" -fi - -if ls $buildDir/external-issues*.json >/dev/null 2>&1; then - files=$(ls $buildDir/external-issues*.json | tr '\n' ' ' | sed -E -e 's/ +$//' -e 's/ +/,/g') - echo "EXTERNAL ISSUES FILES = $files" - cmd="$cmd -Dsonar.externalIssuesReportPaths=$files" -else - echo "===> NO EXTERNAL ISSUES" -fi - - -echo -echo "Running: $cmd" | sed "s/$SONAR_TOKEN//g" -echo - -$cmd diff --git a/conf/shellcheck2sonar.py b/conf/shellcheck2sonar.py index 59fe93f7c..8a9e3a27b 100755 --- a/conf/shellcheck2sonar.py +++ b/conf/shellcheck2sonar.py @@ -20,18 +20,21 @@ # """ - Converts shellcheck JSON format to Sonar external issues format +Converts shellcheck JSON format to Sonar external issues format """ + import sys import json -SHELLCHECK = "shellcheck" +TOOLNAME = "shellcheck" MAPPING = {"INFO": "INFO", "LOW": "MINOR", "MEDIUM": "MAJOR", 
"HIGH": "CRITICAL", "BLOCKER": "BLOCKER"} def main() -> None: """Main script entry point""" + v1 = len(sys.argv) > 1 and sys.argv[1] == "v1" + text = "".join(sys.stdin) rules_dict = {} @@ -39,7 +42,7 @@ def main() -> None: for issue in json.loads(text): sonar_issue = { - "ruleId": f"{SHELLCHECK}:{issue['code']}", + "ruleId": f"{TOOLNAME}:{issue['code']}", "effortMinutes": 5, "primaryLocation": { "message": issue["message"], @@ -52,24 +55,32 @@ def main() -> None: }, }, } - issue_list.append(sonar_issue) if issue["level"] in ("info", "style"): sev_mqr = "LOW" elif issue["level"] == "warning": sev_mqr = "MEDIUM" else: sev_mqr = "HIGH" - rules_dict[f"{SHELLCHECK}:{issue['code']}"] = { - "id": f"{SHELLCHECK}:{issue['code']}", - "name": f"{SHELLCHECK}:{issue['code']}", - "engineId": SHELLCHECK, + rules_dict[f"{TOOLNAME}:{issue['code']}"] = { + "id": f"{TOOLNAME}:{issue['code']}", + "name": f"{TOOLNAME}:{issue['code']}", + "engineId": TOOLNAME, "type": "CODE_SMELL", "cleanCodeAttribute": "LOGICAL", "severity": MAPPING[sev_mqr], "impacts": [{"softwareQuality": "MAINTAINABILITY", "severity": sev_mqr}], } + if v1: + sonar_issue["engineId"] = TOOLNAME + sonar_issue["severity"] = MAPPING.get(sev_mqr, sev_mqr) + sonar_issue["type"] = "CODE_SMELL" + issue_list.append(sonar_issue) + if len(issue_list) == 0: + return external_issues = {"rules": list(rules_dict.values()), "issues": issue_list} + if v1: + external_issues.pop("rules") print(json.dumps(external_issues, indent=3, separators=(",", ": "))) diff --git a/conf/trivy2sonar.py b/conf/trivy2sonar.py index 156c230ee..6939cfb3e 100755 --- a/conf/trivy2sonar.py +++ b/conf/trivy2sonar.py @@ -20,9 +20,10 @@ # """ - Converts Trivy JSON format to Sonar external issues format +Converts Trivy JSON format to Sonar external issues format """ + import sys import json @@ -34,13 +35,13 @@ def main() -> None: """Main script entry point""" + v1 = len(sys.argv) > 1 and sys.argv[1] == "v1" text = "".join(sys.stdin) rules_dict = {} issue_list = 
{} for issue in json.loads(text)["Results"][0]["Vulnerabilities"]: - sonar_issue = { "ruleId": f"{TOOLNAME}:{issue['VulnerabilityID']}", "effortMinutes": 30, @@ -55,7 +56,6 @@ def main() -> None: }, }, } - issue_list[sonar_issue["primaryLocation"]["message"]] = sonar_issue # score = max([v["V3Score"] for v in issue['CVSS'].values()]) # if score <= 4: # sev = "LOW" @@ -77,8 +77,17 @@ def main() -> None: "cleanCodeAttribute": "LOGICAL", "impacts": [{"softwareQuality": "SECURITY", "severity": sev_mqr}], } + if v1: + sonar_issue["engineId"] = TOOLNAME + sonar_issue["severity"] = MAPPING.get(sev_mqr, sev_mqr) + sonar_issue["type"] = "VULNERABILITY" + issue_list[sonar_issue["primaryLocation"]["message"]] = sonar_issue + if len(issue_list) == 0: + return external_issues = {"rules": list(rules_dict.values()), "issues": list(issue_list.values())} + if v1: + external_issues.pop("rules") print(json.dumps(external_issues, indent=3, separators=(",", ": "))) diff --git a/migration/README.md b/migration/README.md index f9b46b640..ab12959be 100644 --- a/migration/README.md +++ b/migration/README.md @@ -18,7 +18,7 @@ Command line tool to collect SonarQube data to prepare eventual migration to Son # Requirements and Installation -- `sonar-migration` requires python 3.8 or higher +- `sonar-migration` requires python 3.9 or higher - Installation is based on [pip](https://pypi.org/project/pip/). - Online installation. 
- Run: `python3 -m pip install sonar-migration` (or `python3 -m pip upgrade sonar-migration`) diff --git a/migration/build.sh b/migration/build.sh new file mode 100755 index 000000000..88867c2b1 --- /dev/null +++ b/migration/build.sh @@ -0,0 +1,49 @@ +#!/bin/bash +# +# sonar-tools +# Copyright (C) 2025 Olivier Korach +# mailto:olivier.korach AT gmail DOT com +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. +# + +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. 
&& pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +build_image=1 + +while [[ $# -ne 0 ]]; do + case "${1}" in + nodocker) + build_image=0 + ;; + *) + ;; + esac + shift +done + +rm -rf "${ROOT_DIR}/build/lib/migration" "${ROOT_DIR}/build/lib/cli" "${ROOT_DIR}/build/lib/sonar" "${ROOT_DIR}"/build/scripts*/sonar_migration "${ROOT_DIR}"/dist/sonar_migration* +mv "${ROOT_DIR}/pyproject.toml" "${ROOT_DIR}/pyproject.toml.sonar-tools" +cp "${ROOT_DIR}/migration/pyproject.toml" "${ROOT_DIR}" +poetry build +mv "${ROOT_DIR}/pyproject.toml.sonar-tools" "${ROOT_DIR}/pyproject.toml" + +# Deploy locally for tests +pip install --upgrade --force-reinstall "${ROOT_DIR}"/dist/sonar_migration-*-py3-*.whl + +if [[ "${build_image}" == "1" ]]; then + docker build -t olivierkorach/sonar-migration:latest -f migration/snapshot.Dockerfile "${ROOT_DIR}" --load +fi \ No newline at end of file diff --git a/migration/deploy.sh b/migration/deploy.sh index 9e0fc4ad0..34e73991d 100755 --- a/migration/deploy.sh +++ b/migration/deploy.sh @@ -20,14 +20,15 @@ # ME="$( basename "${BASH_SOURCE[0]}" )" -ROOTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )" -CONFDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. 
&& pwd )" +CONF_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" build_image=1 release=0 +release_docker=0 -while [ $# -ne 0 ]; do - case $1 in +while [[ $# -ne 0 ]]; do + case "${1}" in nodocker) build_image=0 ;; @@ -43,27 +44,30 @@ while [ $# -ne 0 ]; do shift done -ruff format -rm -rf "$ROOTDIR/build/lib/migration" "$ROOTDIR/build/lib/cli" "$ROOTDIR/build/lib/sonar" "$ROOTDIR"/build/scripts*/sonar_migration "$ROOTDIR"/dist/sonar_migration* -python3 "$ROOTDIR/setup_migration.py" bdist_wheel +rm -rf "${ROOT_DIR}/build/lib/migration" "${ROOT_DIR}/build/lib/cli" "${ROOT_DIR}/build/lib/sonar" "${ROOT_DIR}"/build/scripts*/sonar_migration "${ROOT_DIR}"/dist/sonar_migration* +mv "${ROOT_DIR}/pyproject.toml" "${ROOT_DIR}/pyproject.toml.sonar-tools" +cp "${ROOT_DIR}/migration/pyproject.toml" "${ROOT_DIR}" +poetry build +mv "${ROOT_DIR}/pyproject.toml.sonar-tools" "${ROOT_DIR}/pyproject.toml" + # Deploy locally for tests -pip install --upgrade --force-reinstall "$ROOTDIR"/dist/sonar_migration-*-py3-*.whl +pip install --upgrade --force-reinstall "${ROOT_DIR}"/dist/sonar_migration-*-py3-*.whl -if [ "$build_image" == "1" ]; then - docker build -t olivierkorach/sonar-migration:latest -f migration/snapshot.Dockerfile "$ROOTDIR" --load +if [[ "${build_image}" == "1" ]]; then + docker build -t olivierkorach/sonar-migration:latest -f migration/snapshot.Dockerfile "${ROOT_DIR}" --load fi # Deploy on pypi.org once released -if [ "$release" = "1" ]; then +if [[ "${release}" = "1" ]]; then echo "Confirm release [y/n] ?" 
read -r confirm - if [ "$confirm" = "y" ]; then - python3 -m twine upload "$ROOTDIR"/dist/sonar_migration-*-py3-*.whl + if [[ "${confirm}" = "y" ]]; then + python3 -m twine upload "${ROOT_DIR}"/dist/sonar_migration-*-py3-*.whl fi fi -if [ "$release_docker" = "1" ]; then - docker buildx build --push --platform linux/amd64,linux/arm64 -t olivierkorach/sonar-migration:0.4 -t olivierkorach/sonar-migration:latest -f migration/release.Dockerfile "$ROOTDIR" - cd "$CONFDIR" && docker pushrm olivierkorach/sonar-migration +if [[ "${release_docker}" = "1" ]]; then + docker buildx build --push --platform linux/amd64,linux/arm64 -t olivierkorach/sonar-migration:0.4 -t olivierkorach/sonar-migration:latest -f migration/release.Dockerfile "${ROOT_DIR}" + cd "${CONF_DIR}" && docker pushrm olivierkorach/sonar-migration fi \ No newline at end of file diff --git a/migration/migration.py b/migration/migration.py index 2cfa08783..c1e490b6c 100644 --- a/migration/migration.py +++ b/migration/migration.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# """ - Exports SonarQube platform configuration as JSON +Exports SonarQube platform configuration as JSON """ from cli import options, config diff --git a/migration/pyproject.toml b/migration/pyproject.toml new file mode 100644 index 000000000..1384fd8cb --- /dev/null +++ b/migration/pyproject.toml @@ -0,0 +1,183 @@ +[project] +name = "sonar-migration" +version = "0.6" +description = "A tool for SonarQube Server to Cloud migration assistance" +authors = [ + {name = "Olivier Korach", email = "olivier.korach@gmail.com"}, +] +maintainers = [ + {name = "Olivier Korach", email = "olivier.korach@gmail.com"}, +] + +license = "LGPL-3.0-only" + +license-files = ["LICEN[CS]E*"] + +keywords = ["sonarqube", "sonar", "migration", "administration", "automation"] + +readme = "README.md" +requires-python = ">= 3.9" +dependencies = [ + "argparse", + "datetime", + "python-dateutil (>=2.9.0)", + "requests (>=2.32)", + "jprops (>=2.0.2)", + "levenshtein (>=0.27.1)", + "PyYAML (>=6.0.2)", +] + +classifiers = [ + "Development Status :: 5 - Production/Stable", + # Indicate who your project is intended for + "Intended Audience :: Developers", + "Topic :: Code Quality :: Utility Tools", + # Specify the Python versions you support here. 
+ "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] + +[project.urls] +Homepage = "https://pypi.org/sonar-migration" +Documentation = "https://github.com/okorach/migration/README.md" +Repository = "https://github.com/okorach/sonar-tools.git" +Issues = "https://github.com/okorach/sonar-tools/issues" +Changelog = "https://github.com/okorach/sonar-tools/blob/master/doc/what-is-new.md" + +[project.scripts] +sonar-migration = "migration.migration:main" + +[build-system] +build-backend = "poetry.core.masonry.api" +requires = [ + "poetry-core>=2.0.0,<3.0.0", + "wheel", + "sphinx>=7.4.7", + "sphinx_rtd_theme>=3.0.2", + "sphinx-autodoc-typehints>=2.2.3", + "twine", + "get_pypi_latest_version", +] + +[tool.poetry] +packages = [{include = "sonar"}, {include = "migration"}, {include = "cli"}] + +[dependency-groups] +test = [ + "pytest (>=8.1.1)", + "coverage (>=7.4.4)", +] +lint = [ + "ruff (>=0.5.2)", + "pylint (>=3.1.0)", + "flake8 (>=7.0.0)", +] +docs = [ + "sphinx (>=7.3.7)", + "sphinx-autodoc-typehints (>=2.1.0)", + "sphinx-rtd-theme (>=2.0.0)", +] +dev = [ + { include-group = "test" }, + { include-group = "lint" }, + { include-group = "docs" }, +] + +[tool.ruff] + +# Set the maximum line length to 150. +line-length = 150 +target-version = "py39" +indent-width = 4 + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false +# Like Black, automatically detect the appropriate line ending. +line-ending = "auto" +# Enable auto-formatting of code examples in docstrings. 
Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = true + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. +docstring-code-line-length = "dynamic" + + +[tool.ruff.lint] +# Add the `line-too-long` rule to the enforced rule set. By default, Ruff omits rules that +# overlap with the use of a formatter, like Black, but we can override this behavior by +# explicitly adding the rule. +# extend-select = ["E501"] +# select = [ +# # pycodestyle +# "E", +# # Pyflakes +# "F", +# # pyupgrade +# # "UP", +# # flake8-bugbear +# "B", +# # flake8-simplify +# "SIM", +# # isort +# "I", +#] +select = [ "ALL" ] +extend-ignore = [ + "D403", + "D415", + "D400", + "FBT002", + "FBT001", + "PTH123", # `open()` should be replaced by `Path.open()` + "ISC001", + "COM812", # Missing trailing comma in a dictionary or set literal + "D211", + "D213", + "D203", + "D401", + "ERA001", + "S101", + "I001", + "TRY003", + "EM102", +] + +exclude = [ + ".eggs", + ".git", + ".mypy_cache", + ".pyenv", + ".pytest_cache", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "build", + "dist", + "node_modules", + "site-packages" +] + +[tool.ruff.lint.pyupgrade] +# Preserve types, even if a file imports `from __future__ import annotations`. +keep-runtime-typing = true diff --git a/migration/release.Dockerfile b/migration/release.Dockerfile index 95cf1ce22..d52026124 100644 --- a/migration/release.Dockerfile +++ b/migration/release.Dockerfile @@ -22,16 +22,14 @@ ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" WORKDIR /opt/sonar-migration COPY ./sonar sonar -COPY ./requirements.txt . COPY ./cli cli -COPY ./setup_migration.py . COPY ./migration migration COPY ./migration/README.md . COPY ./LICENSE . 
COPY ./sonar/audit sonar/audit RUN pip install --upgrade pip \ -&& pip install sonar-migration==0.4 --force-reinstall +&& pip install sonar-migration==0.6 --force-reinstall USER ${USERNAME} WORKDIR /home/${USERNAME} diff --git a/migration/snapshot.Dockerfile b/migration/snapshot.Dockerfile index af947163d..c79c3be28 100644 --- a/migration/snapshot.Dockerfile +++ b/migration/snapshot.Dockerfile @@ -22,16 +22,15 @@ ENV PATH="${VIRTUAL_ENV}/bin:${PATH}" WORKDIR /opt/sonar-migration COPY ./sonar sonar -COPY ./requirements.txt . +COPY ./migration/pyproject.toml . COPY ./migration migration +COPY ./migration/README.md . COPY ./LICENSE . COPY ./cli cli -COPY ./setup_migration.py . RUN pip install --upgrade pip \ -&& pip install --no-cache-dir -r requirements.txt \ -&& pip install --no-cache-dir --upgrade pip setuptools wheel \ -&& python setup_migration.py bdist_wheel \ +&& pip install --no-cache-dir poetry \ +&& poetry build \ && pip install dist/sonar_migration-*-py3-*.whl --force-reinstall USER ${USERNAME} diff --git a/poetry.lock b/poetry.lock index 1981b6bdf..2ea889106 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.0 and should not be changed by hand. [[package]] name = "alabaster" @@ -68,53 +68,6 @@ files = [ [package.extras] dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] -[[package]] -name = "black" -version = "24.8.0" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.8" -groups = ["dev", "lint"] -files = [ - {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, - {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, - {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, - {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, - {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, - {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, - {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, - {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, - {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, - {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, - {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, - {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, - {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, - {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, - {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, - {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, - {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, - {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, - {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, - {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, - {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, - {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop 
(>=0.15.2)"] - [[package]] name = "certifi" version = "2025.8.3" @@ -216,21 +169,6 @@ files = [ {file = "charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14"}, ] -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["dev", "lint"] -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - [[package]] name = "colorama" version = "0.4.6" @@ -238,11 +176,11 @@ description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["dev", "docs", "lint", "test"] +markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {dev = "sys_platform == \"win32\" or platform_system == \"Windows\"", docs = "sys_platform == \"win32\"", lint = "platform_system == \"Windows\" or sys_platform == \"win32\"", test = "sys_platform == \"win32\""} [[package]] name = "coverage" @@ -772,42 +710,18 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] -[[package]] -name = "mypy-extensions" -version = "1.1.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.8" -groups = ["dev", "lint"] -files = [ - {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, - {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, -] - [[package]] name = "packaging" version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["dev", "docs", "lint", "test"] +groups = ["dev", "docs", "test"] files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -groups = ["dev", "lint"] -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - [[package]] name = "platformdirs" version = "4.3.6" @@ -1598,14 +1512,14 @@ files = [ [[package]] name = "urllib3" -version = "2.2.3" +version = "2.5.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "dev", "docs"] files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, + {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] [package.extras] diff --git a/pyproject.toml b/pyproject.toml index 4144c11f7..e55747f9a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ requires = [ ] [tool.poetry] -packages = [{include = "sonar"}, {include = "cli"}] +packages = [{include = "sonar"}, {include = "migration"}, {include = "cli"}] [dependency-groups] test = [ @@ -166,6 +166,12 @@ extend-ignore = [ "D211", "D213", "D203", + "D401", + "ERA001", + "S101", + "I001", + "TRY003", + "EM102", ] exclude = [ @@ -187,3 +193,6 @@ exclude = [ "site-packages" ] +[tool.ruff.lint.pyupgrade] +# Preserve types, even if a file imports `from __future__ import annotations`. 
+keep-runtime-typing = true diff --git a/requirements-to-build.txt b/requirements-to-build.txt deleted file mode 100644 index 85f8ede55..000000000 --- a/requirements-to-build.txt +++ /dev/null @@ -1,12 +0,0 @@ -setuptools -black -wheel -sphinx -sphinx_rtd_theme -sphinx-autodoc-typehints -twine -pytest -coverage -flake8 -pylint -get_pypi_latest_version diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 23c786459..000000000 --- a/requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ -argparse -datetime -python-dateutil -requests -jprops -levenshtein -PyYAML diff --git a/setup.py b/setup.py deleted file mode 100644 index cf35a5bbc..000000000 --- a/setup.py +++ /dev/null @@ -1,83 +0,0 @@ -# -# sonar-tools -# Copyright (C) 2019-2025 Olivier Korach -# mailto:olivier.korach AT gmail DOT com -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 3 of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
-# - -""" - - Package setup - -""" -import setuptools -from sonar import version - - -with open("README.md", "r", encoding="utf-8") as fh: - long_description = fh.read() -setuptools.setup( - name="sonar-tools", - version=version.PACKAGE_VERSION, - scripts=["sonar-tools"], - author="Olivier Korach", - author_email="olivier.korach@gmail.com", - description="A collection of utility scripts for SonarQube Server or Cloud", - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/okorach/sonar-tools", - project_urls={ - "Bug Tracker": "https://github.com/okorach/sonar-tools/issues", - "Documentation": "https://github.com/okorach/sonar-tools/README.md", - "Source Code": "https://github.com/okorach/sonar-tools", - }, - packages=setuptools.find_packages(), - package_data={"sonar": ["LICENSE", "audit/rules.json", "config.json", "audit/sonar-audit.properties"]}, - install_requires=[ - "argparse", - "datetime", - "python-dateutil", - "requests", - "jprops", - "levenshtein", - "PyYAML ", - ], - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)", - "Operating System :: OS Independent", - ], - entry_points={ - "console_scripts": [ - "sonar-audit = cli.audit:main", - "sonar-projects-export = cli.projects_export:main", - "sonar-projects-import = cli.projects_import:main", - "sonar-projects = cli.projects_cli:main", - "sonar-measures-export = cli.measures_export:main", - "sonar-housekeeper = cli.housekeeper:main", - "sonar-issues-sync = cli.findings_sync:main", - "sonar-findings-sync = cli.findings_sync:main", - "sonar-custom-measures = cli.cust_measures:main", - "sonar-issues-export = cli.findings_export:main", - "sonar-findings-export = cli.findings_export:main", - "sonar-loc = cli.loc:main", - "sonar-config = cli.config:main", - "support-audit = cli.support:main", - "sonar-rules = cli.rules_cli:main", - ] - }, - python_requires=">=3.8", 
-) diff --git a/setup_migration.py b/setup_migration.py deleted file mode 100644 index fa9b95db0..000000000 --- a/setup_migration.py +++ /dev/null @@ -1,66 +0,0 @@ -# -# sonar-tools -# Copyright (C) 2019-2025 Olivier Korach -# mailto:olivier.korach AT gmail DOT com -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 3 of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
-# - -""" - - Package setup - -""" -import setuptools -from sonar import version - -with open("migration/README.md", "r", encoding="utf-8") as fh: - long_description = fh.read() -setuptools.setup( - name="sonar-migration", - version=version.MIGRATION_TOOL_VERSION, - scripts=["migration/sonar_migration"], - author="Olivier Korach", - author_email="olivier.korach@gmail.com", - description="A SonarQube collection tool to use in the context of SonarQube Server to SonarQube Cloud migration", - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/okorach/sonar-tools", - project_urls={ - "Bug Tracker": "https://github.com/okorach/sonar-tools/issues", - "Documentation": "https://github.com/okorach/sonar-tools/migration/README.md", - "Source Code": "https://github.com/okorach/sonar-tools", - }, - packages=setuptools.find_packages(), - package_data={"sonar": ["LICENSE", "audit/rules.json", "audit/sonar-audit.properties"]}, - install_requires=[ - "argparse", - "datetime", - "python-dateutil", - "requests", - "jprops", - ], - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)", - "Operating System :: OS Independent", - ], - entry_points={ - "console_scripts": [ - "sonar-migration = migration.migration:main", - ] - }, - python_requires=">=3.8", -) diff --git a/sonar-project.properties b/sonar-project.properties index 9a98c3a6d..5e1a9984b 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -4,11 +4,11 @@ sonar.projectName=Sonar Tools sonar.python.version=3.9 # Comma-separated paths to directories with sources (required) -sonar.sources=sonar, cli, migration, conf, setup.py, setup_migration.py +sonar.sources=sonar, cli, migration, conf # Encoding of the source files sonar.sourceEncoding=UTF-8 -sonar.exclusions=doc/api/**/*, build/**/* +sonar.exclusions=doc/api/**/*, build/**/*, migration/pyproject.toml 
sonar.python.flake8.reportPaths=build/flake8-report.out sonar.python.pylint.reportPaths=build/pylint-report.out sonar.sarifReportPaths=build/results_sarif.sarif @@ -16,6 +16,5 @@ sonar.sarifReportPaths=build/results_sarif.sarif # sonar.python.bandit.reportPaths=build/bandit-report.json sonar.tests=test/gen/latest, test/gen/lts, test/gen/9, test/gen/9-ce, test/gen/cb -sonar.coverage.exclusions=setup*.py, test/**/*, conf/*2sonar.py, cli/cust_measures.py, sonar/custom_measures.py, cli/support.py, cli/projects_export.py, cli/projects_import.py +sonar.coverage.exclusions=test/**/*, conf/*2sonar.py, cli/cust_measures.py, sonar/custom_measures.py, cli/support.py, cli/projects_export.py, cli/projects_import.py, **/*.sh -sonar.cpd.exclusions=setup*.py diff --git a/sonar/aggregations.py b/sonar/aggregations.py index 16c69f0ed..39c3bfc21 100644 --- a/sonar/aggregations.py +++ b/sonar/aggregations.py @@ -19,7 +19,7 @@ # """ - Parent module of applications and portfolios +Parent module of applications and portfolios """ diff --git a/sonar/app_branches.py b/sonar/app_branches.py index 68d09e89e..8d5a662e0 100644 --- a/sonar/app_branches.py +++ b/sonar/app_branches.py @@ -18,14 +18,12 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" Abstraction of Sonar Application Branch """ +"""Abstraction of Sonar Application Branch""" from __future__ import annotations from typing import Optional import json -from http import HTTPStatus -from requests import RequestException from requests.utils import quote import sonar.logging as log @@ -112,11 +110,7 @@ def create(cls, app: object, name: str, project_branches: list[Branch]) -> Appli else: # Default main branch of project params["project"].append(obj.key) params["projectBranch"].append("") - try: - app.endpoint.post(ApplicationBranch.API[c.CREATE], params=params) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"creating branch {name} of {str(app)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(f"{str(app)} branch '{name}", e.response.text) + app.endpoint.post(ApplicationBranch.API[c.CREATE], params=params) return ApplicationBranch(app=app, name=name, project_branches=project_branches) @classmethod @@ -201,10 +195,9 @@ def update(self, name: str, project_branches: list[Branch]) -> bool: params["projectBranch"].append(br_name) try: ok = self.post(ApplicationBranch.API[c.UPDATE], params=params).ok - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"updating {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: ApplicationBranch.CACHE.pop(self) - raise exceptions.ObjectNotFound(str(self), e.response.text) + raise self.name = name self._project_branches = project_branches diff --git a/sonar/applications.py b/sonar/applications.py index 0a52518e9..0738916b8 100644 --- a/sonar/applications.py +++ b/sonar/applications.py @@ -93,11 +93,7 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Application: o = Application.CACHE.get(key, endpoint.local_url) if o: return o - try: - data = json.loads(endpoint.get(Application.API[c.GET], params={"application": key}).text)["application"] - except (ConnectionError, 
RequestException) as e: - util.handle_error(e, f"searching application {key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(key, f"Application key '{key}' not found") + data = json.loads(endpoint.get(Application.API[c.GET], params={"application": key}).text)["application"] return cls.load(endpoint, data) @classmethod @@ -132,11 +128,7 @@ def create(cls, endpoint: pf.Platform, key: str, name: str) -> Application: :rtype: Application """ check_supported(endpoint) - try: - endpoint.post(Application.API["CREATE"], params={"key": key, "name": name}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating application {key}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(key, e.response.text) + endpoint.post(Application.API["CREATE"], params={"key": key, "name": name}) log.info("Creating object") return Application(endpoint=endpoint, key=key, name=name) @@ -151,10 +143,9 @@ def refresh(self) -> None: self.reload(json.loads(self.get("navigation/component", params={"component": self.key}).text)) self.reload(json.loads(self.get(Application.API[c.GET], params=self.api_params(c.GET)).text)["application"]) self.projects() - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"refreshing {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: Application.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") + raise def __str__(self) -> str: """String name of object""" diff --git a/sonar/audit/audit_config.py b/sonar/audit/audit_config.py index 66c00f42e..588f4605b 100644 --- a/sonar/audit/audit_config.py +++ b/sonar/audit/audit_config.py @@ -19,6 +19,7 @@ # """sonar-config CLI""" + import os import pathlib import jprops diff --git a/sonar/audit/rules.py b/sonar/audit/rules.py index 44164c809..6873ccd60 100644 --- a/sonar/audit/rules.py +++ b/sonar/audit/rules.py @@ -19,6 +19,7 @@ # 
"""Abstraction of the audit rule concept""" + import enum import json from typing import Optional diff --git a/sonar/audit/severities.py b/sonar/audit/severities.py index c02153f8f..dcc015dd4 100644 --- a/sonar/audit/severities.py +++ b/sonar/audit/severities.py @@ -19,6 +19,7 @@ # """Sonar Audit severities""" + import enum from typing import Optional diff --git a/sonar/branches.py b/sonar/branches.py index 2f22b6256..115e96452 100644 --- a/sonar/branches.py +++ b/sonar/branches.py @@ -24,8 +24,8 @@ from http import HTTPStatus from typing import Optional import json +import re from urllib.parse import unquote -from requests import HTTPError, RequestException import requests.utils from sonar import platform @@ -89,16 +89,11 @@ def get_object(cls, concerned_object: projects.Project, branch_name: str) -> Bra o = Branch.CACHE.get(concerned_object.key, branch_name, concerned_object.base_url()) if o: return o - try: - data = json.loads(concerned_object.get(Branch.API[c.LIST], params={"project": concerned_object.key}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"searching {str(concerned_object)} for branch '{branch_name}'", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(concerned_object.key, f"{str(concerned_object)} not found") - - for br in data.get("branches", []): - if br["name"] == branch_name: - return cls.load(concerned_object, branch_name, br) - raise exceptions.ObjectNotFound(branch_name, f"Branch '{branch_name}' of {str(concerned_object)} not found") + data = json.loads(concerned_object.get(Branch.API[c.LIST], params={"project": concerned_object.key}).text) + br = next((b for b in data.get("branches", []) if b["name"] == branch_name), None) + if not br: + raise exceptions.ObjectNotFound(branch_name, f"Branch '{branch_name}' of {str(concerned_object)} not found") + return cls.load(concerned_object, branch_name, br) @classmethod def load(cls, concerned_object: projects.Project, branch_name: str, 
data: types.ApiPayload) -> Branch: @@ -112,9 +107,11 @@ def load(cls, concerned_object: projects.Project, branch_name: str, data: types. """ branch_name = unquote(branch_name) o = Branch.CACHE.get(concerned_object.key, branch_name, concerned_object.base_url()) + br_data = next((br for br in data.get("branches", []) if br["name"] == branch_name), None) if not o: o = cls(concerned_object, branch_name) - o._load(data) + if br_data: + o._load(br_data) return o def __str__(self) -> str: @@ -135,44 +132,33 @@ def refresh(self) -> Branch: :return: itself :rtype: Branch """ - try: - data = json.loads(self.get(Branch.API[c.LIST], params=self.api_params(c.LIST)).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"refreshing {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - Branch.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found in SonarQube") - for br in data.get("branches", []): - if br["name"] == self.name: - self._load(br) - else: - # While we're there let's load other branches with up to date branch data - Branch.load(self.concerned_object, br["name"], data) + data = json.loads(self.get(Branch.API[c.LIST], params=self.api_params(c.LIST)).text) + br_data = next((br for br in data.get("branches", []) if br["name"] == self.name), None) + if not br_data: + Branch.CACHE.clear() + raise exceptions.ObjectNotFound(self.name, f"{str(self)} not found") + self._load(br_data) + # While we're there let's load other branches with up to date branch data + for br in [b for b in data.get("branches", []) if b["name"] != self.name]: + Branch.load(self.concerned_object, br["name"], data) return self def _load(self, data: types.ApiPayload) -> None: - if self.sq_json is None: - self.sq_json = data - else: - self.sq_json.update(data) + log.debug("Loading %s with data %s", self, data) + self.sq_json = (self.sq_json or {}) | data self._is_main = self.sq_json["isMain"] self._last_analysis = 
util.string_to_date(self.sq_json.get("analysisDate", None)) self._keep_when_inactive = self.sq_json.get("excludedFromPurge", False) self._is_main = self.sq_json.get("isMain", False) def is_kept_when_inactive(self) -> bool: - """ - :return: Whether the branch is kept when inactive - :rtype: bool - """ + """Returns whether the branch is kept when inactive""" if self._keep_when_inactive is None or self.sq_json is None: self.refresh() return self._keep_when_inactive def is_main(self) -> bool: - """ - :return: Whether the branch is the project main branch - :rtype: bool - """ + """Returns whether the branch is the project main branch""" if self._is_main is None or self.sq_json is None: self.refresh() return self._is_main @@ -186,11 +172,32 @@ def delete(self) -> bool: """ try: return super().delete() - except (ConnectionError, RequestException) as e: - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.BAD_REQUEST: - log.warning("Can't delete %s, it's the main branch", str(self)) + except exceptions.SonarException as e: + log.warning(e.message) return False + def get( + self, api: str, params: types.ApiParams = None, data: Optional[str] = None, mute: tuple[HTTPStatus] = (), **kwargs: str + ) -> requests.Response: + """Performs an HTTP GET request for the object""" + try: + return super().get(api=api, params=params, data=data, mute=mute, **kwargs) + except exceptions.ObjectNotFound as e: + if re.match(r"Project .+ not found", e.message): + log.warning("Clearing project cache") + projects.Project.CACHE.clear() + raise + + def post(self, api: str, params: types.ApiParams = None, mute: tuple[HTTPStatus] = (), **kwargs: str) -> requests.Response: + """Performs an HTTP POST request for the object""" + try: + return super().post(api=api, params=params, mute=mute, **kwargs) + except exceptions.ObjectNotFound as e: + if re.match(r"Project .+ not found", e.message): + log.warning("Clearing project cache") + projects.Project.CACHE.clear() + raise + def 
new_code(self) -> str: """ :return: The branch new code period definition @@ -199,13 +206,7 @@ def new_code(self) -> str: if self._new_code is None and self.endpoint.is_sonarcloud(): self._new_code = settings.new_code_to_string({"inherited": True}) elif self._new_code is None: - try: - data = json.loads(self.get(api=Branch.API["get_new_code"], params=self.api_params(c.LIST)).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting new code period of {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - Branch.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.concerned_object.key, f"{str(self.concerned_object)} not found") - + data = json.loads(self.get(api=Branch.API["get_new_code"], params=self.api_params(c.LIST)).text) for b in data["newCodePeriods"]: new_code = settings.new_code_to_string(b) if b["branchKey"] == self.name: @@ -245,12 +246,9 @@ def set_keep_when_inactive(self, keep: bool) -> bool: :return: Whether the operation was successful """ log.info("Setting %s keep when inactive to %s", self, keep) - try: - self.post("project_branches/set_automatic_deletion_protection", params=self.api_params() | {"value": str(keep).lower()}) + ok = self.post("project_branches/set_automatic_deletion_protection", params=self.api_params() | {"value": str(keep).lower()}).ok + if ok: self._keep_when_inactive = keep - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting {str(self)} keep when inactive to {keep}", catch_all=True) - return False return True def set_as_main(self) -> bool: @@ -258,11 +256,7 @@ def set_as_main(self) -> bool: :return: Whether the operation was successful """ - try: - self.post("api/project_branches/set_main", params=self.api_params()) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting {str(self)} as main branch", catch_all=True) - return False + self.post("api/project_branches/set_main", params=self.api_params()) for b in 
self.concerned_object.branches().values(): b._is_main = b.name == self.name return True @@ -317,30 +311,21 @@ def rename(self, new_name: str) -> bool: log.debug("Skipping rename %s with same new name", str(self)) return False log.info("Renaming main branch of %s from '%s' to '%s'", str(self.concerned_object), self.name, new_name) - try: - self.post(Branch.API[c.RENAME], params={"project": self.concerned_object.key, "name": new_name}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"Renaming {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND, HTTPStatus.BAD_REQUEST)) - if isinstance(e, HTTPError): - if e.response.status_code == HTTPStatus.NOT_FOUND: - Branch.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.concerned_object.key, f"str{self.concerned_object} not found") - if e.response.status_code == HTTPStatus.BAD_REQUEST: - return False + self.post(Branch.API[c.RENAME], params={"project": self.concerned_object.key, "name": new_name}) Branch.CACHE.pop(self) self.name = new_name Branch.CACHE.put(self) return True - def get_findings(self) -> dict[str, object]: + def get_findings(self, filters: Optional[types.ApiParams] = None) -> dict[str, object]: """Returns a branch list of findings :return: dict of Findings, with finding key as key :rtype: dict{key: Finding} """ - findings = self.get_issues() - findings.update(self.get_hotspots()) - return findings + if not filters: + return self.concerned_object.get_findings(branch=self.name) + return self.get_issues(filters) | self.get_hotspots(filters) def component_data(self) -> dict[str, str]: """Returns key data""" diff --git a/sonar/components.py b/sonar/components.py index e6c08120d..782d6b1f3 100644 --- a/sonar/components.py +++ b/sonar/components.py @@ -19,9 +19,10 @@ # """ - Abstraction of the SonarQube "component" concept +Abstraction of the SonarQube "component" concept """ + from __future__ import annotations from typing import Optional import math @@ -117,7 +118,7 @@ def 
get_issues(self, filters: types.ApiParams = None) -> dict[str, object]: """Returns list of issues for a component, optionally on branches or/and PRs""" from sonar.issues import search_all - filters = {k: list(set(v) if isinstance(v, (list, set, tuple)) else v) for k, v in (filters or {}).items() if v is not None} + filters = {k: list(set(v)) if isinstance(v, (list, set, tuple)) else v for k, v in (filters or {}).items() if v is not None} log.info("Searching issues for %s with filters %s", str(self), str(filters)) issue_list = search_all(endpoint=self.endpoint, params=self.api_params() | {"additionalFields": "comments"} | filters) self.nbr_issues = len(issue_list) @@ -249,7 +250,7 @@ def set_visibility(self, visibility: str) -> None: def get_analyses(self, filter_in: Optional[list[str]] = None, filter_out: Optional[list[str]] = None) -> types.ApiPayload: """Returns a component analyses""" params = utilities.dict_remap(self.api_params(c.READ), {"component": "project"}) - data = self.endpoint.get_paginated("project_analyses/search", return_field="analyses", params=params)["analyses"] + data = self.endpoint.get_paginated("project_analyses/search", return_field="analyses", **params)["analyses"] if filter_in and len(filter_in) > 0: data = [d for d in data if any(e["category"] in filter_in for e in d["events"])] if filter_out and len(filter_out) > 0: diff --git a/sonar/config.json b/sonar/config.json index b79a9043c..d55d74c74 100644 --- a/sonar/config.json +++ b/sonar/config.json @@ -233,7 +233,10 @@ ["OPEN", "REOPENED"], ["CRITICAL", "HIGH"], ["MAJOR", "MEDIUM"], - ["MINOR", "LOW"] + ["MINOR", "LOW"], + ["VULNERABILITY", "SECURITY"], + ["BUG", "RELIABILITY"], + ["CODE_SMELL", "MAINTAINABILITY"] ], "fields": [ ["severities", "impactSeverities"], diff --git a/sonar/custom_measures.py b/sonar/custom_measures.py index 9556dbc92..ab1ca125e 100644 --- a/sonar/custom_measures.py +++ b/sonar/custom_measures.py @@ -19,9 +19,10 @@ # """ - Abstraction of the SonarQube "custom 
measure" concept +Abstraction of the SonarQube "custom measure" concept """ + import json import sonar.sqobject as sq import sonar.platform as pf diff --git a/sonar/dce/app_nodes.py b/sonar/dce/app_nodes.py index e66a0474e..886e5520e 100644 --- a/sonar/dce/app_nodes.py +++ b/sonar/dce/app_nodes.py @@ -19,7 +19,7 @@ # """ - Abstraction of the App Node concept +Abstraction of the App Node concept """ diff --git a/sonar/dce/nodes.py b/sonar/dce/nodes.py index c965f0e98..7a9061054 100644 --- a/sonar/dce/nodes.py +++ b/sonar/dce/nodes.py @@ -19,7 +19,7 @@ # """ - Abstraction of the DCE Node concept +Abstraction of the DCE Node concept """ diff --git a/sonar/dce/search_nodes.py b/sonar/dce/search_nodes.py index 1236fed96..543cc9e1d 100644 --- a/sonar/dce/search_nodes.py +++ b/sonar/dce/search_nodes.py @@ -19,7 +19,7 @@ # """ - Abstraction of the Search Node concept +Abstraction of the Search Node concept """ diff --git a/sonar/devops.py b/sonar/devops.py index 22fed1461..1db7e129f 100644 --- a/sonar/devops.py +++ b/sonar/devops.py @@ -19,13 +19,11 @@ # """Abstraction of the SonarQube DevOps platform concept""" + from __future__ import annotations from typing import Optional, Union -from http import HTTPStatus import json -from requests import RequestException - import sonar.logging as log from sonar.util import types, cache from sonar import platform @@ -108,11 +106,10 @@ def create(cls, endpoint: platform.Platform, key: str, plt_type: str, url_or_wor elif plt_type == "bitbucketcloud": params.update({"clientSecret": _TO_BE_SET, "clientId": _TO_BE_SET, "workspace": url_or_workspace}) endpoint.post(_CREATE_API_BBCLOUD, params=params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating devops platform {key}/{plt_type}/{url_or_workspace}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + except exceptions.SonarException as e: if endpoint.edition() in (c.CE, c.DE): log.warning("Can't set DevOps platform '%s', don't you have more that 1 of that 
type?", key) - raise exceptions.UnsupportedOperation(f"Can't set DevOps platform '{key}', don't you have more that 1 of that type?") + raise exceptions.UnsupportedOperation(e.message) from e o = DevopsPlatform(endpoint=endpoint, key=key, platform_type=plt_type) o.refresh() return o @@ -184,8 +181,7 @@ def update(self, **kwargs) -> bool: ok = self.post(f"alm_settings/update_{alm_type}", params=params).ok self.url = kwargs["url"] self._specific = {k: v for k, v in params.items() if k not in ("key", "url")} - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"updating devops platform {self.key}/{alm_type}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + except exceptions.SonarException: ok = False return ok diff --git a/sonar/exceptions.py b/sonar/exceptions.py index e97efe3b0..9f6f9297d 100644 --- a/sonar/exceptions.py +++ b/sonar/exceptions.py @@ -23,6 +23,7 @@ Exceptions raised but the sonar python APIs """ + from sonar import errcodes diff --git a/sonar/findings.py b/sonar/findings.py index 784654d40..0372f5931 100644 --- a/sonar/findings.py +++ b/sonar/findings.py @@ -23,8 +23,7 @@ import concurrent.futures from datetime import datetime from typing import Optional -from http import HTTPStatus -from requests import RequestException +import re import Levenshtein import sonar.logging as log @@ -32,6 +31,7 @@ import sonar.platform as pf from sonar.util import types from sonar.util import constants as c, issue_defs as idefs +from sonar import exceptions import sonar.utilities as util from sonar import projects, rules @@ -182,7 +182,7 @@ def assign(self, assignee: Optional[str] = None) -> str: def language(self) -> str: """Returns the finding language""" - return rules.get_object(endpoint=self.endpoint, key=self.rule).language + return rules.Rule.get_object(endpoint=self.endpoint, key=self.rule).language def to_csv(self, without_time: bool = False) -> list[str]: """ @@ -458,9 +458,10 @@ def search_siblings( def do_transition(self, transition: 
str) -> bool: try: return self.post("issues/do_transition", {"issue": self.key, "transition": transition}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"applying transition {transition}", catch_http_statuses=(HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)) - return False + except exceptions.SonarException as e: + if re.match(r"Transition from state [A-Za-z]+ does not exist", e.message): + raise exceptions.UnsupportedOperation(e.message) from e + raise def get_branch_and_pr(self, data: types.ApiPayload) -> tuple[Optional[str], Optional[str]]: """ diff --git a/sonar/groups.py b/sonar/groups.py index 49db31e04..a747e5f91 100644 --- a/sonar/groups.py +++ b/sonar/groups.py @@ -20,14 +20,12 @@ # """Abstraction of the SonarQube group concept""" + from __future__ import annotations import json from typing import Optional -from http import HTTPStatus -from requests import HTTPError, RequestException - import sonar.logging as log import sonar.platform as pf import sonar.sqobject as sq @@ -114,11 +112,7 @@ def create(cls, endpoint: pf.Platform, name: str, description: str = None) -> Gr :return: The group object """ log.debug("Creating group '%s'", name) - try: - data = json.loads(endpoint.post(Group.api_for(c.CREATE, endpoint), params={"name": name, "description": description}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating group '{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(name, util.sonar_error(e.response)) + data = json.loads(endpoint.post(Group.api_for(c.CREATE, endpoint), params={"name": name, "description": description}).text) o = cls.read(endpoint=endpoint, name=name) o.sq_json.update(data) return o @@ -168,10 +162,10 @@ def delete(self) -> bool: ok = self.post(api=Group.API_V1[c.DELETE], params=self.api_params(c.DELETE)).ok if ok: log.info("Removing from %s cache", str(self.__class__.__name__)) - self.__class__.CACHE.pop(self) - except 
(ConnectionError, RequestException) as e: - util.handle_error(e, f"deleting {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") + Group.CACHE.pop(self) + except exceptions.ObjectNotFound: + Group.CACHE.pop(self) + raise return ok def api_params(self, op: str) -> types.ApiParams: @@ -202,7 +196,7 @@ def members(self, use_cache: bool = True) -> list[users.User]: data = json.loads(self.get(MEMBERSHIP_API, params={"groupId": self.id}).text) self.__members = [users.User.get_object_by_id(self.endpoint, d["userId"]) for d in data["groupMemberships"]] else: - data = self.endpoint.get_paginated("api/user_groups/users", return_field="users", params={"name": self.name}) + data = self.endpoint.get_paginated("api/user_groups/users", return_field="users", name=self.name) self.__members = [users.User.get_object(self.endpoint, d["login"]) for d in data["users"]] return self.__members @@ -226,21 +220,11 @@ def add_user(self, user: object) -> bool: :return: Whether the operation succeeded """ log.info("Adding %s to %s", str(user), str(self)) - try: - if self.endpoint.version() >= c.GROUP_API_V2_INTRO_VERSION: - params = {"groupId": self.id, "userId": user.id} - else: - params = {"login": user.login, "name": self.name} - r = self.post(Group.api_for(ADD_USER, self.endpoint), params=params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, "adding user to group", catch_http_statuses=(HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)) - if isinstance(e, HTTPError): - code = e.response.status_code - if code == HTTPStatus.BAD_REQUEST: - raise exceptions.UnsupportedOperation(util.sonar_error(e.response)) - if code == HTTPStatus.NOT_FOUND: - raise exceptions.ObjectNotFound(user.login, util.sonar_error(e.response)) - return r.ok + if self.endpoint.version() >= c.GROUP_API_V2_INTRO_VERSION: + params = {"groupId": self.id, "userId": user.id} + else: + params = {"login": user.login, "name": 
self.name} + return self.post(Group.api_for(ADD_USER, self.endpoint), params=params).ok def remove_user(self, user: object) -> bool: """Removes a user from the group @@ -250,24 +234,14 @@ def remove_user(self, user: object) -> bool: :rtype: bool """ log.info("Removing %s from %s", str(user), str(self)) - try: - if self.endpoint.version() >= c.GROUP_API_V2_INTRO_VERSION: - for m in json.loads(self.get(MEMBERSHIP_API, params={"userId": user.id}).text)["groupMemberships"]: - if m["groupId"] == self.id: - return self.endpoint.delete(f"{Group.api_for(REMOVE_USER, self.endpoint)}/{m['id']}").ok - raise exceptions.ObjectNotFound(user.login, f"{str(self)} or user id '{user.id} not found") - else: - params = {"login": user.login, "name": self.name} - return self.post(Group.api_for(REMOVE_USER, self.endpoint), params=params).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, "removing user from group", catch_http_statuses=(HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)) - if isinstance(e, HTTPError): - code = e.response.status_code - if code == HTTPStatus.BAD_REQUEST: - raise exceptions.UnsupportedOperation(util.sonar_error(e.response)) - if code == HTTPStatus.NOT_FOUND: - raise exceptions.ObjectNotFound(user.login, util.sonar_error(e.response)) - return False + if self.endpoint.version() >= c.GROUP_API_V2_INTRO_VERSION: + for m in json.loads(self.get(MEMBERSHIP_API, params={"userId": user.id}).text)["groupMemberships"]: + if m["groupId"] == self.id: + return self.endpoint.delete(f"{Group.api_for(REMOVE_USER, self.endpoint)}/{m['id']}").ok + raise exceptions.ObjectNotFound(user.login, f"{str(self)} or user id '{user.id} not found") + else: + params = {"login": user.login, "name": self.name} + return self.post(Group.api_for(REMOVE_USER, self.endpoint), params=params).ok def audit(self, audit_settings: types.ConfigSettings = None) -> list[Problem]: """Audits a group and return list of problems found @@ -409,9 +383,8 @@ def get_object_from_id(endpoint: 
pf.Platform, id: str) -> Group: raise exceptions.UnsupportedOperation("Operation unsupported before SonarQube 10.4") if len(Group.CACHE) == 0: get_list(endpoint) - for o in Group.CACHE.values(): - if o.id == id: - return o + if gr := next((o for o in Group.CACHE.values() if o.id == id), None): + return gr raise exceptions.ObjectNotFound(id, message=f"Group '{id}' not found") diff --git a/sonar/hotspots.py b/sonar/hotspots.py index 344befb1d..fb168ad44 100644 --- a/sonar/hotspots.py +++ b/sonar/hotspots.py @@ -25,7 +25,6 @@ from datetime import datetime from typing import Optional from http import HTTPStatus -from requests import RequestException import requests.utils import sonar.logging as log @@ -36,6 +35,7 @@ from sonar import users from sonar import findings, rules, changelog +from sonar import exceptions PROJECT_FILTER = "project" PROJECT_FILTER_OLD = "projectKey" @@ -152,19 +152,18 @@ def refresh(self) -> bool: self.rule = d["rule"]["key"] self.assignee = d.get("assignee", None) return resp.ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, "refreshing hotspot", catch_all=True) + except exceptions.SonarException: return False def __mark_as(self, resolution: Optional[str], comment: Optional[str] = None, status: str = "REVIEWED") -> bool: try: params = util.remove_nones({"hotspot": self.key, "status": status, "resolution": resolution, "commemt": comment}) - r = self.post("hotspots/change_status", params=params) - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, f"marking hotspot as {status}/{resolution}", catch_all=True) + ok = self.post("hotspots/change_status", params=params).ok + self.refresh() + except exceptions.SonarException: return False - self.refresh() - return r.ok + else: + return ok def mark_as_safe(self) -> bool: """Marks a hotspot as safe @@ -215,8 +214,7 @@ def add_comment(self, comment: str) -> bool: """ try: return self.post("hotspots/add_comment", params={"hotspot": self.key, 
"comment": comment}).ok - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "adding comment to hotspot", catch_all=True) + except exceptions.SonarException: return False def assign(self, assignee: Optional[str], comment: Optional[str] = None) -> bool: @@ -231,13 +229,13 @@ def assign(self, assignee: Optional[str], comment: Optional[str] = None) -> bool log.debug("Unassigning %s", str(self)) else: log.debug("Assigning %s to '%s'", str(self), str(assignee)) - r = self.post("hotspots/assign", util.remove_nones({"hotspot": self.key, "assignee": assignee, "comment": comment})) - if r.ok: + ok = self.post("hotspots/assign", util.remove_nones({"hotspot": self.key, "assignee": assignee, "comment": comment})).ok + if ok: self.assignee = assignee - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "assigning/unassigning hotspot", catch_all=True) + except exceptions.SonarException: return False - return r.ok + else: + return ok def unassign(self, comment: Optional[str] = None) -> bool: """Unassigns a hotspot (and optionally comment) @@ -421,8 +419,7 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str, try: data = json.loads(endpoint.get(Hotspot.API[c.SEARCH], params=inline_filters, mute=(HTTPStatus.NOT_FOUND,)).text) nbr_hotspots = util.nbr_total_elements(data) - except (ConnectionError, RequestException) as e: - util.handle_error(e, "searching hotspots", catch_all=True) + except exceptions.SonarException: nbr_hotspots = 0 return {} nbr_pages = util.nbr_pages(data) @@ -500,7 +497,7 @@ def post_search_filter(hotspots_dict: dict[str, Hotspot], filters: types.ApiPara log.debug("%d hotspots remaining after filtering by createdBefore %s", len(filtered_findings), str(filters["createdBefore"])) if "languages" in filters: filtered_findings = { - k: v for k, v in filtered_findings.items() if rules.get_object(endpoint=v.endpoint, key=v.rule).language in filters["languages"] + k: v for k, v in 
filtered_findings.items() if rules.Rule.get_object(endpoint=v.endpoint, key=v.rule).language in filters["languages"] } log.debug("%d hotspots remaining after filtering by languages %s", len(filtered_findings), str(filters["languages"])) log.debug("%d hotspots remaining after post search filtering", len(filtered_findings)) diff --git a/sonar/issues.py b/sonar/issues.py index 3fefaf72c..35ecf9d14 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -57,6 +57,9 @@ OLD_FP = "FALSE-POSITIVE" NEW_FP = "FALSE_POSITIVE" +_MQR_SEARCH_FIELDS = (_NEW_SEARCH_SEVERITY_FIELD, _NEW_SEARCH_STATUS_FIELD, _NEW_SEARCH_TYPE_FIELD) +_STD_SEARCH_FIELDS = (_OLD_SEARCH_SEVERITY_FIELD, _OLD_SEARCH_STATUS_FIELD, _OLD_SEARCH_TYPE_FIELD) + _COMMA_CRITERIAS = ( _OLD_SEARCH_COMPONENT_FIELD, _NEW_SEARCH_COMPONENT_FIELD, @@ -290,19 +293,13 @@ def add_comment(self, comment: str) -> bool: """ log.debug("Adding comment '%s' to %s", comment, str(self)) try: - r = self.post("issues/add_comment", {"issue": self.key, "text": comment}) - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "adding comment", catch_all=True) + return self.post("issues/add_comment", {"issue": self.key, "text": comment}).ok + except exceptions.SonarException: return False - return r.ok def __set_severity(self, **params) -> bool: - try: - log.debug("Changing severity of %s from '%s' to '%s'", str(self), self.severity, str(params)) - r = self.post("issues/set_severity", {"issue": self.key, **params}) - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "changing issue severity", catch_all=True) - return False + log.debug("Changing severity of %s from '%s' to '%s'", str(self), self.severity, str(params)) + r = self.post("issues/set_severity", {"issue": self.key, **params}) return r.ok def set_severity(self, severity: str) -> bool: @@ -344,13 +341,12 @@ def assign(self, assignee: Optional[str] = None) -> bool: try: params = util.remove_nones({"issue": self.key, 
"assignee": assignee}) log.debug("Assigning %s to '%s'", str(self), str(assignee)) - r = self.post("issues/assign", params) - if r.ok: + if ok := self.post("issues/assign", params).ok: self.assignee = assignee - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "assigning issue", catch_all=True) + except exceptions.SonarException: return False - return r.ok + else: + return ok def get_tags(self, **kwargs) -> list[str]: """Returns issues tags""" @@ -396,13 +392,12 @@ def set_type(self, new_type: str) -> bool: raise exceptions.UnsupportedOperation("Changing issue type is not supported in MQR mode") log.debug("Changing type of issue %s from %s to %s", self.key, self.type, new_type) try: - r = self.post("issues/set_type", {"issue": self.key, "type": new_type}) - if r.ok: + if ok := self.post("issues/set_type", {"issue": self.key, "type": new_type}).ok: self.type = new_type - except (ConnectionError, requests.RequestException) as e: - util.handle_error(e, "setting issue type", catch_all=True) + except exceptions.SonarException: return False - return r.ok + else: + return ok def is_wont_fix(self) -> bool: """ @@ -1002,6 +997,8 @@ def pre_search_filters(endpoint: pf.Platform, params: ApiParams) -> ApiParams: if allowed is not None and filters[field] is not None: filters[field] = list(set(util.intersection(filters[field], allowed))) + disallowed = _STD_SEARCH_FIELDS if endpoint.is_mqr_mode() else _MQR_SEARCH_FIELDS + filters = {k: v for k, v in filters.items() if k not in disallowed} filters = {k: util.list_to_csv(v) for k, v in filters.items() if v} log.debug("Sanitized issue search filters %s", str(filters)) return filters diff --git a/sonar/languages.py b/sonar/languages.py index 178af1925..7cf7b765c 100644 --- a/sonar/languages.py +++ b/sonar/languages.py @@ -20,6 +20,7 @@ # """Abstraction of the SonarQube language concept""" + from __future__ import annotations import json @@ -28,6 +29,7 @@ import sonar.platform as pf from 
sonar.util.types import ApiPayload from sonar.util import cache +from sonar.util import constants as c #: List of language APIs APIS = {"list": "languages/list"} @@ -74,10 +76,10 @@ def number_of_rules(self, rule_type: str = None) -> int: :returns: Nbr of rules for that language (and optional type) :rtype: int """ - if not rule_type or rule_type not in rules.LEGACY_TYPES: + if not rule_type or rule_type not in (c.VULN, c.HOTSPOT, c.BUG, c.CODE_SMELL): rule_type = "_ALL" if not self._nb_rules[rule_type]: - self._nb_rules[rule_type] = rules.search(self.endpoint, languages=self.key, types=rule_type) + self._nb_rules[rule_type] = rules.search(self.endpoint, params={"languages": self.key, "types": rule_type}) return self._nb_rules[rule_type] diff --git a/sonar/logging.py b/sonar/logging.py index 265521757..8fb0ec567 100644 --- a/sonar/logging.py +++ b/sonar/logging.py @@ -17,7 +17,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" sonar-tools logging module """ +"""sonar-tools logging module""" import logging diff --git a/sonar/measures.py b/sonar/measures.py index 30c5860df..77433697e 100644 --- a/sonar/measures.py +++ b/sonar/measures.py @@ -19,11 +19,10 @@ # """Abstraction of the SonarQube measure concept""" + from __future__ import annotations import json -from http import HTTPStatus -from requests import RequestException from sonar import metrics, exceptions, platform from sonar.util.types import ApiPayload, ApiParams, KeyList from sonar.util import cache, constants as c @@ -139,11 +138,7 @@ def get(concerned_object: object, metrics_list: KeyList, **kwargs) -> dict[str, params["metricKeys"] = util.list_to_csv(metrics_list) log.debug("Getting measures with %s", str(params)) - try: - data = json.loads(concerned_object.endpoint.get(Measure.API_READ, params={**kwargs, **params}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting measures {str(metrics_list)} of {str(concerned_object)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(concerned_object.key, f"{str(concerned_object)} not found") + data = json.loads(concerned_object.endpoint.get(Measure.API_READ, params={**kwargs, **params}).text) m_dict = dict.fromkeys(metrics_list, None) for m in data["component"]["measures"]: m_dict[m["metric"]] = Measure.load(data=m, concerned_object=concerned_object) @@ -168,15 +163,7 @@ def get_history(concerned_object: object, metrics_list: KeyList, **kwargs) -> li params["metrics"] = util.list_to_csv(metrics_list) log.debug("Getting measures history with %s", str(params)) - try: - data = json.loads(concerned_object.endpoint.get(Measure.API_HISTORY, params={**kwargs, **params}).text) - except (ConnectionError, RequestException) as e: - util.handle_error( - e, - f"getting measures {str(metrics_list)} history of {str(concerned_object)}", - catch_http_statuses=(HTTPStatus.NOT_FOUND,), - ) - raise 
exceptions.ObjectNotFound(concerned_object.key, f"{str(concerned_object)} not found") + data = json.loads(concerned_object.endpoint.get(Measure.API_HISTORY, params={**kwargs, **params}).text) res_list = [] for m in reversed(data["measures"]): res_list += [[dt["date"], m["metric"], dt["value"]] for dt in m["history"] if "value" in dt] diff --git a/sonar/metrics.py b/sonar/metrics.py index f77b06077..fc21d5cd2 100644 --- a/sonar/metrics.py +++ b/sonar/metrics.py @@ -105,8 +105,7 @@ def __init__(self, endpoint: pf.Platform, key: str, data: ApiPayload = None) -> @classmethod def get_object(cls, endpoint: pf.Platform, key: str) -> Metric: search(endpoint=endpoint) - o = Metric.CACHE.get(key, endpoint.local_url) - if not o: + if not (o := Metric.CACHE.get(key, endpoint.local_url)): raise exceptions.ObjectNotFound(key, f"Metric key '{key}' not found") return o @@ -156,26 +155,17 @@ def search(endpoint: pf.Platform, show_hidden_metrics: bool = False, use_cache: def is_a_rating(endpoint: pf.Platform, metric_key: str) -> bool: """Whether a metric is a rating""" - try: - return Metric.get_object(endpoint, metric_key).is_a_rating() - except exceptions.ObjectNotFound: - return False + return Metric.get_object(endpoint, metric_key).is_a_rating() def is_a_percent(endpoint: pf.Platform, metric_key: str) -> bool: """Whether a metric is a percent""" - try: - return Metric.get_object(endpoint, metric_key).is_a_percent() - except exceptions.ObjectNotFound: - return False + return Metric.get_object(endpoint, metric_key).is_a_percent() def is_an_effort(endpoint: pf.Platform, metric_key: str) -> bool: """Whether a metric is an effort""" - try: - return Metric.get_object(endpoint, metric_key).is_an_effort() - except exceptions.ObjectNotFound: - return False + return Metric.get_object(endpoint, metric_key).is_an_effort() def count(endpoint: pf.Platform, use_cache: bool = True) -> int: diff --git a/sonar/organizations.py b/sonar/organizations.py index 4e799adb3..57627a486 100644 --- 
a/sonar/organizations.py +++ b/sonar/organizations.py @@ -19,15 +19,13 @@ # """ - Abstraction of the SonarQube Cloud organization concept +Abstraction of the SonarQube Cloud organization concept """ from __future__ import annotations import json -from http import HTTPStatus from threading import Lock -from requests import RequestException import sonar.logging as log import sonar.platform as pf @@ -73,15 +71,9 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Organization: """ if not endpoint.is_sonarcloud(): raise exceptions.UnsupportedOperation(_NOT_SUPPORTED) - o = Organization.CACHE.get(key, endpoint.local_url) - if o: + if o := Organization.CACHE.get(key, endpoint.local_url): return o - try: - data = json.loads(endpoint.get(Organization.API[c.SEARCH], params={"organizations": key}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting organization {key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(key, f"Organization '{key}' not found") - + data = json.loads(endpoint.get(Organization.API[c.SEARCH], params={"organizations": key}).text) if len(data["organizations"]) == 0: raise exceptions.ObjectNotFound(key, f"Organization '{key}' not found") return cls.load(endpoint, data["organizations"][0]) @@ -192,8 +184,8 @@ def exists(endpoint: pf.Platform, org_key: str) -> bool: log.info("Verifying that organization '%s' exists", org_key) try: _ = Organization.get_object(endpoint=endpoint, key=org_key) - log.warning("Organization '%s' does not exist or user is not a member", org_key) except exceptions.ObjectNotFound: + log.warning("Organization '%s' does not exist or user is not a member", org_key) return False log.debug("Organization '%s' exists and user is a member", org_key) return True diff --git a/sonar/permissions/global_permissions.py b/sonar/permissions/global_permissions.py index f1aa19885..da1bca153 100644 --- a/sonar/permissions/global_permissions.py +++ 
b/sonar/permissions/global_permissions.py @@ -19,6 +19,7 @@ # """Abstraction of SonarQube global permissions""" + from __future__ import annotations import sonar.logging as log diff --git a/sonar/permissions/permission_templates.py b/sonar/permissions/permission_templates.py index 6331f00dc..627ce911a 100644 --- a/sonar/permissions/permission_templates.py +++ b/sonar/permissions/permission_templates.py @@ -19,15 +19,15 @@ # """Abstraction of the SonarQube permission template concept""" + from __future__ import annotations import json import re -from requests import RequestException import sonar.logging as log from sonar.util import types, cache -from sonar import sqobject, utilities +from sonar import sqobject, utilities, exceptions from sonar.permissions import template_permissions import sonar.platform as pf from sonar.audit.rules import get_rule, RuleId @@ -135,7 +135,7 @@ def permissions(self) -> template_permissions.TemplatePermissions: self._permissions = template_permissions.TemplatePermissions(self) return self._permissions - def set_as_default(self, what_list: list[str]) -> None: + def set_as_default(self, what_list: list[str]) -> bool: """Sets a permission template as default for projects or apps or portfolios""" log.debug("Setting %s as default for %s", str(self), str(what_list)) ed = self.endpoint.edition() @@ -145,9 +145,10 @@ def set_as_default(self, what_list: list[str]) -> None: log.warning("Can't set permission template as default for %s on a %s edition", qual, ed) continue try: - self.post("permissions/set_default_template", params={"templateId": self.key, "qualifier": qual}) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"setting {str(self)} as default") + return self.post("permissions/set_default_template", params={"templateId": self.key, "qualifier": qual}).ok + except exceptions.SonarException: + return False + return False def set_pattern(self, pattern: str) -> PermissionTemplate: """Sets a permission template 
pattern""" diff --git a/sonar/permissions/permissions.py b/sonar/permissions/permissions.py index 4b5e4aa00..4ae1e9b84 100644 --- a/sonar/permissions/permissions.py +++ b/sonar/permissions/permissions.py @@ -25,10 +25,9 @@ import json from abc import ABC, abstractmethod -from requests import RequestException import sonar.logging as log -from sonar import utilities +from sonar import utilities, exceptions from sonar.util import types from sonar.audit.rules import get_rule, RuleId from sonar.audit.problem import Problem @@ -299,15 +298,14 @@ def _get_api(self, api: str, perm_type: str, ret_field: str, **extra_params) -> else: counter += 1 page, nbr_pages = page + 1, utilities.nbr_pages(data) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"getting permissions of {str(self)}", catch_all=True) + except exceptions.SonarException: page += 1 return perms def _post_api(self, api: str, set_field: str, perms_dict: types.JsonPermissions, **extra_params) -> bool: if perms_dict is None: return True - result = False + ok = True params = extra_params.copy() for u, perms in perms_dict.items(): params[set_field] = u @@ -315,11 +313,10 @@ def _post_api(self, api: str, set_field: str, perms_dict: types.JsonPermissions, for p in filtered_perms: params["permission"] = p try: - r = self.endpoint.post(api, params=params) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"setting permissions of {str(self)}", catch_all=True) - result = result and r.ok - return result + ok = self.endpoint.post(api, params=params).ok and ok + except exceptions.SonarException: + ok = False + return ok def simplify(perms_dict: dict[str, list[str]]) -> Optional[dict[str, str]]: diff --git a/sonar/permissions/portfolio_permissions.py b/sonar/permissions/portfolio_permissions.py index b4f1e33e1..a6bd63ca9 100644 --- a/sonar/permissions/portfolio_permissions.py +++ b/sonar/permissions/portfolio_permissions.py @@ -19,6 +19,7 @@ # """Abstraction of 
portfolio permissions""" + from sonar.permissions import aggregation_permissions diff --git a/sonar/permissions/project_permissions.py b/sonar/permissions/project_permissions.py index 2e4ec9d83..989c91627 100644 --- a/sonar/permissions/project_permissions.py +++ b/sonar/permissions/project_permissions.py @@ -72,6 +72,9 @@ def _set_perms( self.read() for p in permissions.PERMISSION_TYPES: to_remove = diff_func(self.permissions.get(p, {}), new_perms.get(p, {})) + if p == "users" and "admin" in to_remove: + # Don't remove admin permission to the admin user, this is not possible anyway + to_remove["admin"] = [v for v in to_remove["admin"] if v != "admin"] self._post_api(apis["remove"][p], field[p], to_remove, **kwargs) to_add = diff_func(new_perms.get(p, {}), self.permissions.get(p, {})) self._post_api(apis["add"][p], field[p], to_add, **kwargs) diff --git a/sonar/permissions/quality_permissions.py b/sonar/permissions/quality_permissions.py index 4f5ceeabf..e3a2bb0e0 100644 --- a/sonar/permissions/quality_permissions.py +++ b/sonar/permissions/quality_permissions.py @@ -24,11 +24,10 @@ from typing import Optional import json -from requests import RequestException from sonar.util import types import sonar.logging as log -from sonar import utilities +from sonar import utilities, exceptions from sonar.audit.problem import Problem from sonar.permissions import permissions @@ -86,8 +85,7 @@ def _get_api(self, api: str, perm_type: tuple[str, ...], ret_field: str, **extra data = json.loads(resp.text) perms += [p[ret_field] for p in data[perm_type]] page, nbr_pages = page + 1, utilities.nbr_pages(data) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"getting permissions of {str(self)}", catch_all=True) + except exceptions.SonarException: page += 1 return perms diff --git a/sonar/platform.py b/sonar/platform.py index de0b7e73b..364a86b5d 100644 --- a/sonar/platform.py +++ b/sonar/platform.py @@ -19,14 +19,16 @@ # """ - Abstraction of the 
SonarQube platform or instance concept +Abstraction of the SonarQube platform or instance concept """ +from __future__ import annotations from http import HTTPStatus import sys import os +import re from typing import Optional import time import datetime @@ -45,6 +47,7 @@ import sonar.audit.severities as sev import sonar.audit.types as typ from sonar.audit.problem import Problem +from sonar import webhooks WRONG_CONFIG_MSG = "Audit config property %s has wrong value %s, skipping audit" @@ -60,12 +63,14 @@ class Platform(object): """Abstraction of the SonarQube "platform" concept""" - def __init__(self, url: str, token: str, org: str = None, cert_file: Optional[str] = None, http_timeout: int = 10, **kwargs) -> None: + def __init__( + self, url: str, token: str, org: Optional[str] = None, cert_file: Optional[str] = None, http_timeout: int = 10, **kwargs: str + ) -> None: """Creates a SonarQube platform object - :param url: base URL of the SonarQube platform - :param token: token to connect to the platform - :param cert_file: Client certificate, if any needed, defaults to None + :param str url: base URL of the SonarQube platform + :param str token: token to connect to the platform + :param str cert_file: Client certificate, if any needed, defaults to None :return: the SonarQube object :rtype: Platform """ @@ -86,7 +91,8 @@ def __init__(self, url: str, token: str, org: str = None, cert_file: Optional[st def __str__(self) -> str: """ - Returns the string representation of the SonarQube connection, with the token recognizable but largely redacted + Returns the string representation of the SonarQube connection, + with the token recognizable but largely redacted """ return f"{util.redacted_token(self.__token)}@{self.local_url}" @@ -111,15 +117,11 @@ def verify_connection(self) -> None: raise exceptions.ConnectionError(f"{str(e)} while connecting to {self.local_url}") def url(self) -> str: - """ - Returns the SonarQube URL - """ + """Returns the SonarQube URL""" return 
self.external_url def version(self) -> tuple[int, int, int]: - """ - Returns the SonarQube platform version or 0.0.0 for SonarQube Cloud - """ + """Returns the SonarQube platform version or 0.0.0 for SonarQube Cloud""" if self.is_sonarcloud(): return 0, 0, 0 if self._version is None: @@ -129,7 +131,7 @@ def version(self) -> tuple[int, int, int]: def release_date(self) -> Optional[datetime.date]: """ - :returns: the SonarQube platform release date if found in update center or None if SonarQube Cloud or if the date cannot be found + :return: the SonarQube platform release date if found in update center or None if SonarQube Cloud or if the date cannot be found """ if self.is_sonarcloud(): return None @@ -137,7 +139,7 @@ def release_date(self) -> Optional[datetime.date]: def edition(self) -> str: """ - Returns the Sonar edition: "community", "developer", "enterprise", c.DCE or "sonarcloud" + Returns the Sonar edition: "community", "developer", "enterprise", "datacenter" or "sonarcloud" """ if self.is_sonarcloud(): return c.SC @@ -157,12 +159,11 @@ def user_data(self) -> types.ApiPayload: return self.__user_data def set_user_agent(self, user_agent: str) -> None: + """Sets the user agent for HTTP requests""" self._user_agent = user_agent def server_id(self) -> str: - """ - Returns the SonarQube instance server id - """ + """Returns the SonarQube instance server id""" if self._server_id is not None: return self._server_id if self._sys_info is not None and _SERVER_ID_KEY in self._sys_info["System"]: @@ -172,15 +173,13 @@ def server_id(self) -> str: return self._server_id def is_sonarcloud(self) -> bool: - """ - Returns whether the target platform is SonarQube Cloud - """ + """Returns whether the target platform is SonarQube Cloud""" return util.is_sonarcloud_url(self.local_url) def basics(self) -> dict[str, str]: """ - :return: the 3 basic information of the platform: ServerId, Edition and Version - :rtype: dict{"serverId": , "edition": , "version": } + :return: the basic 
information of the platform: ServerId, Edition and Version + :rtype: dict{"serverId": , "edition": , "version": , "plugins": } """ url = self.get_setting(key="sonar.core.serverBaseURL") @@ -192,6 +191,19 @@ def basics(self) -> dict[str, str]: return {**data, "version": util.version_to_string(self.version()[:3]), "serverId": self.server_id(), "plugins": self.plugins()} + def default_user_group(self) -> str: + """ + :return: the built-in default group name on that platform + """ + return c.SQC_USERS if self.is_sonarcloud() else c.SQS_USERS + + def is_default_user_group(self, group_name: str) -> bool: + """ + :param str group_name: group name to check + :return: whether the group is a built-in default group + """ + return group_name == self.default_user_group() + def get(self, api: str, params: types.ApiParams = None, **kwargs) -> requests.Response: """Makes an HTTP GET request to SonarQube @@ -242,35 +254,19 @@ def delete(self, api: str, params: types.ApiParams = None, **kwargs) -> requests """ return self.__run_request(requests.delete, api, params, **kwargs) - def default_user_group(self) -> str: - """ - :return: the built-in default group name on that platform - """ - return c.SQC_USERS if self.is_sonarcloud() else c.SQS_USERS - - def is_default_user_group(self, group_name: str) -> bool: - """ - :param str group_name: group name to check - :return: whether the group is a built-in default group - """ - return group_name == self.default_user_group() - def __run_request(self, request: callable, api: str, params: types.ApiParams = None, **kwargs) -> requests.Response: """Makes an HTTP request to SonarQube""" mute = kwargs.pop("mute", ()) api = _normalize_api(api) - headers = {"user-agent": self._user_agent, "accept": _APP_JSON} - headers.update(kwargs.get("headers", {})) - if params is None: - params = {} + headers = {"user-agent": self._user_agent, "accept": _APP_JSON} | kwargs.get("headers", {}) + params = params or {} with_org = kwargs.pop("with_organization", True) 
if self.is_sonarcloud(): headers["Authorization"] = f"Bearer {self.__token}" if with_org: params["organization"] = self.organization - req_type, url = "", "" + req_type, url = getattr(request, "__name__", repr(request)).upper(), "" if log.get_level() <= log.DEBUG: - req_type = getattr(request, "__name__", repr(request)).upper() url = self.__urlstring(api, params, kwargs.get("data", {})) log.debug("%s: %s", req_type, url) kwargs["headers"] = headers @@ -292,25 +288,37 @@ def __run_request(self, request: callable, api: str, params: types.ApiParams = N self.local_url = new_url r.raise_for_status() except HTTPError as e: - lvl = log.DEBUG if r.status_code in mute else log.ERROR + code = r.status_code + lvl = log.DEBUG if code in mute else log.ERROR log.log(lvl, "%s (%s request)", util.error_msg(e), req_type) - raise e - except (ConnectionError, RequestException) as e: + err_msg = util.sonar_error(e.response) + err_msg_lower = err_msg.lower() + key = next((params[k] for k in ("key", "project", "component", "componentKey") if k in params), "Unknown") + if any( + msg in err_msg_lower for msg in ("not found", "no quality gate has been found", "does not exist", "could not find") + ): # code == HTTPStatus.NOT_FOUND: + raise exceptions.ObjectNotFound(key, err_msg) from e + if any(msg in err_msg_lower for msg in ("already exists", "already been taken")): + raise exceptions.ObjectAlreadyExists(key, err_msg) from e + if re.match(r"(Value of parameter .+ must be one of|No enum constant)", err_msg): + raise exceptions.UnsupportedOperation(err_msg) from e + if any(msg in err_msg_lower for msg in ("insufficient privileges", "insufficient permissions")): + raise exceptions.SonarException(err_msg, errcodes.SONAR_API_AUTHORIZATION) from e + if "unknown url" in err_msg_lower: + raise exceptions.UnsupportedOperation(err_msg) from e + raise exceptions.SonarException(err_msg, errcodes.SONAR_API) from e + except ConnectionError as e: util.handle_error(e, "") return r - def 
get_paginated(self, api: str, return_field: str, params: types.ApiParams = None) -> types.ObjectJsonRepr: + def get_paginated(self, api: str, return_field: str, **kwargs: str) -> types.ObjectJsonRepr: """Returns all pages of a paginated API""" - new_params = {} if params is None else params.copy() - new_params["ps"] = 500 - new_params["p"] = 1 - data = json.loads(self.get(api, params=new_params).text) - nb_pages = util.nbr_pages(data, api_version=1) - if nb_pages == 1: + params = {"ps": 500} | kwargs + data = json.loads(self.get(api, params=params | {"p": 1}).text) + if (nb_pages := util.nbr_pages(data, api_version=1)) == 1: return data for page in range(2, nb_pages + 1): - new_params["p"] = page - data[return_field].update(json.loads(self.get(api, params=new_params).text)[return_field]) + data[return_field].update(json.loads(self.get(api, params=params | {"p": page}).text)[return_field]) return data def global_permissions(self) -> dict[str, any]: @@ -451,13 +459,10 @@ def __urlstring(self, api: str, params: types.ApiParams, data: str = None) -> st url += f" - BODY: {data}" return url - def webhooks(self) -> dict[str, object]: + def webhooks(self) -> dict[str, webhooks.WebHook]: """ :return: the list of global webhooks - :rtype: dict{: , ...} """ - from sonar import webhooks - return webhooks.get_list(self) def export(self, export_settings: types.ConfigSettings, full: bool = False) -> types.ObjectJsonRepr: @@ -500,16 +505,12 @@ def set_webhooks(self, webhooks_data: types.ObjectJsonRepr) -> bool: """Sets global webhooks with a list of webhooks represented as JSON :param webhooks_data: the webhooks JSON representation - :return: Whether the operation succeeded or not + :return: The number of webhooks configured """ + log.debug("%s setting webhooks %s", str(self), str(webhooks_data)) if webhooks_data is None: return False - current_wh = self.webhooks() - # FIXME: Handle several webhooks with same name - current_wh_names = [wh.name for wh in current_wh.values()] - 
wh_map = {wh.name: k for k, wh in current_wh.items()} - log.debug("Current webhooks = %s", str(current_wh_names)) - _ = [current_wh[wh_map[wh_name]].update(name=wh_name, **wh) for wh_name, wh in webhooks_data.items() if wh_name in current_wh_names] + webhooks.import_config(self, webhooks_data) return True def import_config(self, config_data: types.ObjectJsonRepr) -> int: @@ -524,9 +525,15 @@ def import_config(self, config_data: types.ObjectJsonRepr) -> int: count = 0 config_data = config_data.get("globalSettings", {}) flat_settings = util.flatten(config_data) - count += sum(1 if self.set_webhooks(v) else 0 for k, v in config_data.get("webhooks", None) or {}) count += sum(1 if self.set_setting(k, v) else 0 for k, v in flat_settings.items()) + try: + wh_data = config_data["generalSettings"]["webhooks"] + self.set_webhooks(wh_data) + count += len(wh_data) + except KeyError: + pass + if settings.NEW_CODE_PERIOD in config_data.get("generalSettings", {}): (nc_type, nc_val) = settings.decode(settings.NEW_CODE_PERIOD, config_data["generalSettings"][settings.NEW_CODE_PERIOD]) try: diff --git a/sonar/portfolio_reference.py b/sonar/portfolio_reference.py index 3ab370882..4343ca0c3 100644 --- a/sonar/portfolio_reference.py +++ b/sonar/portfolio_reference.py @@ -19,19 +19,17 @@ # """ - Abstraction of the Sonar sub-portfolio by reference concept +Abstraction of the Sonar sub-portfolio by reference concept """ from __future__ import annotations -from http import HTTPStatus -from requests import RequestException import sonar.logging as log import sonar.platform as pf from sonar.util import types, cache -from sonar import exceptions, utilities +from sonar import exceptions import sonar.sqobject as sq import sonar.util.constants as c @@ -70,14 +68,7 @@ def load(cls, reference: object, parent: object) -> PortfolioReference: @classmethod def create(cls, reference: object, parent: object, params: types.ApiParams = None) -> PortfolioReference: """Constructor, don't use - use class 
methods instead""" - - try: - parent.endpoint.post("views/add_portfolio", params={"portfolio": parent.key, "reference": reference.key}) - except (ConnectionError, RequestException) as e: - utilities.handle_error( - e, f"creating portfolio reference to {str(reference)} in {str(parent)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,) - ) - raise exceptions.ObjectAlreadyExists + parent.endpoint.post("views/add_portfolio", params={"portfolio": parent.key, "reference": reference.key}) return PortfolioReference(reference=reference, parent=parent) def __str__(self) -> str: diff --git a/sonar/portfolios.py b/sonar/portfolios.py index 2686e3335..9057f5f42 100644 --- a/sonar/portfolios.py +++ b/sonar/portfolios.py @@ -19,7 +19,7 @@ # """ - Abstraction of the SonarQube "portfolio" concept +Abstraction of the SonarQube "portfolio" concept """ @@ -30,7 +30,6 @@ import json from http import HTTPStatus from threading import Lock -from requests import HTTPError, RequestException import sonar.logging as log import sonar.platform as pf @@ -257,24 +256,18 @@ def sub_portfolios(self, full: bool = False) -> dict[str, Portfolio]: def add_reference_subportfolio(self, reference: Portfolio) -> object: ref = PortfolioReference.create(parent=self, reference=reference) - try: - if self.endpoint.version() >= (9, 3, 0): - self.post("views/add_portfolio", params={"portfolio": self.key, "reference": reference.key}, mute=(HTTPStatus.BAD_REQUEST,)) - else: - self.post("views/add_local_view", params={"key": self.key, "ref_key": reference.key}, mute=(HTTPStatus.BAD_REQUEST,)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding reference subportfolio to {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + if self.endpoint.version() >= (9, 3, 0): + self.post("views/add_portfolio", params={"portfolio": self.key, "reference": reference.key}, mute=(HTTPStatus.BAD_REQUEST,)) + else: + self.post("views/add_local_view", params={"key": self.key, "ref_key": 
reference.key}, mute=(HTTPStatus.BAD_REQUEST,)) self._sub_portfolios.update({reference.key: ref}) return ref def add_standard_subportfolio(self, key: str, name: str, **kwargs) -> Portfolio: """Adds a subportfolio""" subp = Portfolio.create(endpoint=self.endpoint, key=key, name=name, parent=self, **kwargs) - try: - if self.endpoint.version() < (9, 3, 0): - self.post("views/add_sub_view", params={"key": self.key, "name": name, "subKey": key}, mute=(HTTPStatus.BAD_REQUEST,)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding standard subportfolio to {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + if self.endpoint.version() < (9, 3, 0): + self.post("views/add_sub_view", params={"key": self.key, "name": name, "subKey": key}, mute=(HTTPStatus.BAD_REQUEST,)) self._sub_portfolios.update({subp.key: subp}) return subp @@ -412,13 +405,9 @@ def add_projects(self, projects: set[str]) -> Portfolio: try: self.post("views/add_project", params={"key": self.key, "project": key}, mute=(HTTPStatus.BAD_REQUEST,)) self._selection_mode[_SELECTION_MODE_MANUAL][key] = {c.DEFAULT_BRANCH} - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding projects to {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND, HTTPStatus.BAD_REQUEST)) - if e.response.status_code == HTTPStatus.BAD_REQUEST: - log.warning("%s: Project '%s' already in %s", util.error_msg(e), key, str(self)) - else: - Portfolio.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"Project '{key}' not found, can't be added to {str(self)}") + except exceptions.ObjectNotFound: + Portfolio.CACHE.pop(self) + raise return self def add_project_branches(self, project_key: str, branches: set[str]) -> Portfolio: @@ -431,16 +420,7 @@ def add_project_branches(self, project_key: str, branches: set[str]) -> Portfoli return self def add_project_branch(self, project_key: str, branch: str) -> bool: - try: - r = self.post("views/add_project_branch", params={"key": 
self.key, "project": project_key, "branch": branch}) - except HTTPError as e: - if e.response.status_code == HTTPStatus.NOT_FOUND: - Portfolio.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"Project '{project_key}' or branch '{branch}' not found, can't be added to {str(self)}") - if e.response.status_code == HTTPStatus.BAD_REQUEST: - log.warning("%s: Project '%s' branch '%s', already in %s", util.error_msg(e), project_key, branch, str(self)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding projects to {str(self)}") + r = self.post("views/add_project_branch", params={"key": self.key, "project": project_key, "branch": branch}) if project_key in self._selection_mode[_SELECTION_MODE_MANUAL]: self._selection_mode[_SELECTION_MODE_MANUAL][project_key].discard(c.DEFAULT_BRANCH) self._selection_mode[_SELECTION_MODE_MANUAL][project_key].add(branch) @@ -529,23 +509,20 @@ def add_application(self, app_key: str) -> bool: def add_application_branch(self, app_key: str, branch: str = c.DEFAULT_BRANCH) -> bool: app = applications.Application.get_object(self.endpoint, app_key) - try: - if branch == c.DEFAULT_BRANCH: - log.info("%s: Adding %s default branch", str(self), str(app)) - self.post("views/add_application", params={"portfolio": self.key, "application": app_key}, mute=(HTTPStatus.BAD_REQUEST,)) - else: - app_branch = app_branches.ApplicationBranch.get_object(app=app, branch_name=branch) - log.info("%s: Adding %s", str(self), str(app_branch)) - params = {"key": self.key, "application": app_key, "branch": branch} - self.post("views/add_application_branch", params=params, mute=(HTTPStatus.BAD_REQUEST,)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding app branch to {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + if branch == c.DEFAULT_BRANCH: + log.info("%s: Adding %s default branch", str(self), str(app)) + self.post("views/add_application", params={"portfolio": self.key, 
"application": app_key}, mute=(HTTPStatus.BAD_REQUEST,)) + else: + app_branch = app_branches.ApplicationBranch.get_object(app=app, branch_name=branch) + log.info("%s: Adding %s", str(self), str(app_branch)) + params = {"key": self.key, "application": app_key, "branch": branch} + self.post("views/add_application_branch", params=params, mute=(HTTPStatus.BAD_REQUEST,)) if app_key not in self._applications: self._applications[app_key] = [] self._applications[app_key].append(branch) return True - def add_subportfolio(self, key: str, name: str = None, by_ref: bool = False) -> object: + def add_subportfolio(self, key: str, name: str = None, by_ref: bool = False) -> Portfolio: """Adds a subportfolio to a portfolio, defined by key, name and by reference option""" log.info("Adding sub-portfolios to %s", str(self)) @@ -599,8 +576,7 @@ def get_project_list(self) -> list[str]: data = json.loads(self.get("api/measures/component_tree", params=params).text) nbr_projects = util.nbr_total_elements(data) proj_key_list += [comp["refKey"] for comp in data["components"]] - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting projects list of {str(self)}", catch_all=True) + except exceptions.SonarException: break nbr_pages = util.nbr_pages(data) log.debug("Number of projects: %d - Page: %d/%d", nbr_projects, page, nbr_pages) @@ -644,7 +620,12 @@ def update(self, data: dict[str, str], recurse: bool) -> None: if subp_data.get("byReference", False): o_subp = Portfolio.get_object(self.endpoint, key) if o_subp.key not in key_list: - self.add_subportfolio(o_subp.key, name=o_subp.name, by_ref=True) + try: + self.add_subportfolio(o_subp.key, name=o_subp.name, by_ref=True) + except exceptions.SonarException as e: + # If the exception is that the portfolio already references, just pass + if "already references" not in e.message: + raise else: try: o_subp = Portfolio.get_object(self.endpoint, key) @@ -716,12 +697,7 @@ def exists(endpoint: pf.Platform, key: str) -> bool: 
def delete(endpoint: pf.Platform, key: str) -> bool: """Deletes a portfolio by its key""" - try: - p = Portfolio.get_object(endpoint, key) - p.delete() - return True - except exceptions.ObjectNotFound: - return False + return Portfolio.get_object(endpoint, key).delete() def import_config(endpoint: pf.Platform, config_data: types.ObjectJsonRepr, key_list: types.KeyList = None) -> bool: @@ -757,7 +733,7 @@ def import_config(endpoint: pf.Platform, config_data: types.ObjectJsonRepr, key_ try: o = Portfolio.get_object(endpoint, key) o.update(data=data, recurse=True) - except exceptions.ObjectNotFound as e: + except exceptions.SonarException as e: log.error(e.message) return True @@ -800,8 +776,7 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, **kwarg exported_portfolios[k] = exp else: log.debug("Skipping export of %s, it's a standard sub-portfolio", str(p)) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"exporting {str(p)}, export will be empty for this portfolio", catch_all=True) + except exceptions.SonarException: exported_portfolios[k] = {} i += 1 if i % 10 == 0 or i == nb_portfolios: diff --git a/sonar/projects.py b/sonar/projects.py index 59af14392..65c9418dc 100644 --- a/sonar/projects.py +++ b/sonar/projects.py @@ -19,7 +19,7 @@ # """ - Abstraction of the SonarQube "project" concept +Abstraction of the SonarQube "project" concept """ @@ -35,7 +35,6 @@ from http import HTTPStatus from threading import Lock from requests import HTTPError, RequestException -import Levenshtein import sonar.logging as log import sonar.platform as pf @@ -141,9 +140,7 @@ class Project(components.Component): - """ - Abstraction of the SonarQube project concept - """ + """Abstraction of the SonarQube project concept""" CACHE = cache.Cache() SEARCH_KEY_FIELD = "key" @@ -184,16 +181,10 @@ def get_object(cls, endpoint: pf.Platform, key: str) -> Project: :param str key: Project key to search :raises ObjectNotFound: if project key not 
found :return: The Project - :rtype: Project """ - o = Project.CACHE.get(key, endpoint.local_url) - if o: + if o := Project.CACHE.get(key, endpoint.local_url): return o - try: - data = json.loads(endpoint.get(Project.API[c.READ], params={"component": key}).text) - except RequestException as e: - util.handle_error(e, f"Getting project {key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(key, f"Project key '{key}' not found") + data = json.loads(endpoint.get(Project.API[c.READ], params={"component": key}).text) return cls.load(endpoint, data["component"]) @classmethod @@ -207,8 +198,7 @@ def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> Project: :rtype: Project """ key = data["key"] - o = Project.CACHE.get(key, endpoint.local_url) - if not o: + if not (o := Project.CACHE.get(key, endpoint.local_url)): o = cls(endpoint, key) o.reload(data) return o @@ -223,11 +213,7 @@ def create(cls, endpoint: pf.Platform, key: str, name: str) -> Project: :return: The Project :rtype: Project """ - try: - endpoint.post(Project.API[c.CREATE], params={"project": key, "name": name}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating project '{key}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(key, e.response.text) + endpoint.post(Project.API[c.CREATE], params={"project": key, "name": name}) o = cls(endpoint, key) o.name = name return o @@ -248,14 +234,12 @@ def refresh(self) -> Project: :raises ObjectNotFound: if project key not found :return: self - :rtype: Project """ try: data = json.loads(self.get(Project.api_for(c.READ, self.endpoint), params=self.api_params(c.READ)).text) - except RequestException as e: - util.handle_error(e, f"searching project {self.key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: Project.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") + raise return 
self.reload(data["component"]) def reload(self, data: types.ApiPayload) -> Project: @@ -266,10 +250,7 @@ def reload(self, data: types.ApiPayload) -> Project: :rtype: Project """ """Loads a project object with contents of an api/projects/search call""" - if self.sq_json is None: - self.sq_json = data - else: - self.sq_json.update(data) + self.sq_json = (self.sq_json or {}) | data self.name = data["name"] self._visibility = data["visibility"] if "lastAnalysisDate" in data: @@ -378,7 +359,7 @@ def delete(self) -> bool: :raises ObjectNotFound: If object to delete was not found in SonarQube :raises request.HTTPError: In all other cases of HTTP Errors - :return: Nothing + :return: Whether the operation succeeded """ loc = int(self.get_measure("ncloc", fallback="0")) log.info("Deleting %s, name '%s' with %d LoCs", str(self), self.name, loc) @@ -399,8 +380,7 @@ def binding(self) -> Optional[dict[str, str]]: try: resp = self.get("alm_settings/get_binding", params={"project": self.key}, mute=(HTTPStatus.NOT_FOUND,)) self._binding = {"has_binding": True, "binding": json.loads(resp.text)} - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting binding of {str(self)}", catch_http_errors=True, log_level=log.DEBUG) + except exceptions.SonarException: # Hack: 8.9 returns 404, 9.x returns 400 self._binding = {"has_binding": False} log.debug("%s binding = %s", str(self), str(self._binding.get("binding", None))) @@ -600,8 +580,8 @@ def ci(self) -> str: data = json.loads(self.get("project_analyses/search", params={"project": self.key, "ps": 1}).text)["analyses"] if len(data) > 0: self._ci, self._revision = data[0].get("detectedCI", "unknown"), data[0].get("revision", "unknown") - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting CI tool of {str(self)}", catch_all=True) + except exceptions.SonarException: + pass except KeyError: log.warning("KeyError, can't retrieve CI tool and revision") return self._ci @@ -624,7 +604,7 @@ 
def ai_code_fix(self) -> Optional[str]: if not global_setting or global_setting.value != "ENABLED_FOR_SOME_PROJECTS": return None if "isAiCodeFixEnabled" not in self.sq_json: - data = self.endpoint.get_paginated(api=Project.API[c.LIST], params={"filter": _PROJECT_QUALIFIER}, return_field="components") + data = self.endpoint.get_paginated(api=Project.API[c.LIST], return_field="components", filter=_PROJECT_QUALIFIER) p_data = next((p for p in data["components"] if p["key"] == self.key), None) if p_data: self.sq_json.update(p_data) @@ -684,10 +664,9 @@ def audit(self, audit_settings: types.ConfigSettings) -> list[Problem]: problems += self.__audit_branches(audit_settings) problems += self.__audit_pull_requests(audit_settings) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"auditing {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: Project.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, str(e)) + raise return problems @@ -705,10 +684,13 @@ def export_zip(self, asynchronous: bool = False, timeout: int = 180) -> tuple[st ) try: resp = self.post("project_dump/export", params={"key": self.key}) + except exceptions.ObjectNotFound as e: + Project.CACHE.pop(self) + return f"FAILED/{e.message}", None + except exceptions.SonarException as e: + return f"FAILED/{e.message}", None except RequestException as e: util.handle_error(e, f"exporting zip of {str(self)}", catch_all=True) - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.NOT_FOUND: - raise exceptions.ObjectNotFound(self.key, f"Project key '{self.key}' not found") return f"FAILED/{util.http_error_string(e.response.status_code)}", None except ConnectionError as e: return str(e), None @@ -727,6 +709,7 @@ def export_zip(self, asynchronous: bool = False, timeout: int = 180) -> tuple[st def import_zip(self, asynchronous: bool = False, timeout: int = 180) -> str: """Imports a project zip file in SonarQube + :param bool 
asynchronous: Whether to export the project asynchronously or not (if async, import_zip returns immediately) :param int timeout: timeout in seconds to complete the export operation :return: SUCCESS or FAILED with reason @@ -737,13 +720,13 @@ def import_zip(self, asynchronous: bool = False, timeout: int = 180) -> str: raise exceptions.UnsupportedOperation("Project import is only available with Enterprise and Datacenter Edition") try: resp = self.post("project_dump/import", params={"key": self.key}) - except RequestException as e: - if "Dump file does not exist" in util.sonar_error(e.response): + except exceptions.ObjectNotFound as e: + Project.CACHE.pop(self) + return f"FAILED/{e.message}" + except exceptions.SonarException as e: + if "Dump file does not exist" in e.message: return f"FAILED/{tasks.ZIP_MISSING}" - util.handle_error(e, f"importing zip of {str(self)} {mode}", catch_all=True) - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.NOT_FOUND: - raise exceptions.ObjectNotFound(self.key, f"Project key '{self.key}' not found") - return f"FAILED/{util.http_error_string(e.response.status_code)}" + return f"FAILED/{e.message}" except ConnectionError as e: return f"FAILED/{str(e)}" @@ -772,7 +755,7 @@ def get_branches_and_prs(self, filters: dict[str, str]) -> Optional[dict[str, ob else: try: objects = {b: branches.Branch.get_object(concerned_object=self, branch_name=b) for b in br} - except (exceptions.ObjectNotFound, exceptions.UnsupportedOperation) as e: + except exceptions.SonarException as e: log.error(e.message) if pr: if "*" in pr: @@ -780,17 +763,15 @@ def get_branches_and_prs(self, filters: dict[str, str]) -> Optional[dict[str, ob else: try: objects.update({p: pull_requests.get_object(project=self, pull_request_key=p) for p in pr}) - except exceptions.ObjectNotFound as e: + except exceptions.SonarException as e: log.error(e.message) return objects def get_findings(self, branch: Optional[str] = None, pr: Optional[str] = None) -> dict[str, 
object]: """Returns a project list of findings (issues and hotspots) - :param branch: branch name to consider, if any - :type branch: str, optional - :param pr: PR key to consider, if any - :type pr: str, optional + :param str branch: optional branch name to consider, if any + :param str pr: optional PR key to consider, if any :return: JSON of all findings, with finding key as key :rtype: dict{key: Finding} """ @@ -801,17 +782,9 @@ def get_findings(self, branch: Optional[str] = None, pr: Optional[str] = None) - return {} log.info("Exporting findings for %s", str(self)) findings_list = {} - params = {"project": self.key} - if branch is not None: - params["branch"] = branch - elif pr is not None: - params["pullRequest"] = pr + params = util.remove_nones({"project": self.key, "branch": branch, "pullRequest": pr}) - try: - data = json.loads(self.get("projects/export_findings", params=params).text)["export_findings"] - except (ConnectionError, RequestException) as e: - util.handle_error(e, "getting project findings", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - return {} + data = json.loads(self.get("projects/export_findings", params=params).text)["export_findings"] findings_conflicts = {"SECURITY_HOTSPOT": 0, "BUG": 0, "CODE_SMELL": 0, "VULNERABILITY": 0} nbr_findings = {"SECURITY_HOTSPOT": 0, "BUG": 0, "CODE_SMELL": 0, "VULNERABILITY": 0} for i in data: @@ -826,7 +799,8 @@ def get_findings(self, branch: Optional[str] = None, pr: Optional[str] = None) - i["pullRequest"] = pr nbr_findings[i["type"]] += 1 if i["type"] == "SECURITY_HOTSPOT": - findings_list[key] = hotspots.get_object(endpoint=self.endpoint, key=key, data=i, from_export=True) + if i.get("status", "") != "CLOSED": + findings_list[key] = hotspots.get_object(endpoint=self.endpoint, key=key, data=i, from_export=True) else: findings_list[key] = issues.get_object(endpoint=self.endpoint, key=key, data=i, from_export=True) for t in ("SECURITY_HOTSPOT", "BUG", "CODE_SMELL", "VULNERABILITY"): @@ -843,9 +817,8 @@ 
def get_hotspots(self, filters: Optional[dict[str, str]] = None) -> dict[str, ob if branches_or_prs is None: return super().get_hotspots(filters) findings_list = {} - for comp in branches_or_prs.values(): - if comp: - findings_list = {**findings_list, **comp.get_hotspots()} + for component in [comp for comp in branches_or_prs.values() if comp]: + findings_list |= component.get_hotspots() return findings_list def get_issues(self, filters: Optional[dict[str, str]] = None) -> dict[str, object]: @@ -853,9 +826,8 @@ def get_issues(self, filters: Optional[dict[str, str]] = None) -> dict[str, obje if branches_or_prs is None: return super().get_issues(filters) findings_list = {} - for comp in branches_or_prs.values(): - if comp: - findings_list = {**findings_list, **comp.get_issues()} + for component in [comp for comp in branches_or_prs.values() if comp]: + findings_list |= component.get_issues() return findings_list def count_third_party_issues(self, filters: Optional[dict[str, str]] = None) -> dict[str, int]: @@ -866,9 +838,7 @@ def count_third_party_issues(self, filters: Optional[dict[str, str]] = None) -> return super().count_third_party_issues(filters) log.debug("Getting 3rd party issues on branches/PR") issue_counts = {} - for comp in branches_or_prs.values(): - if not comp: - continue + for comp in [co for co in branches_or_prs.values() if co]: log.debug("Getting 3rd party issues for %s", str(comp)) for k, total in comp.count_third_party_issues(filters).items(): if k not in issue_counts: @@ -954,12 +924,8 @@ def quality_gate(self) -> Optional[tuple[str, bool]]: :return: name of quality gate and whether it's the default :rtype: tuple(name, is_default) """ - try: - data = json.loads(self.get(api="qualitygates/get_by_project", params={"project": self.key}).text) - return data["qualityGate"]["name"], data["qualityGate"]["default"] - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting quality gate of {str(self)}", 
catch_http_statuses=(HTTPStatus.FORBIDDEN,)) - return "Error - Insufficient Permissions", False + data = json.loads(self.get(api="qualitygates/get_by_project", params={"project": self.key}).text) + return data["qualityGate"]["name"], data["qualityGate"]["default"] def webhooks(self) -> dict[str, webhooks.WebHook]: """ @@ -967,11 +933,7 @@ def webhooks(self) -> dict[str, webhooks.WebHook]: :rtype: dict{key: WebHook} """ log.debug("Getting %s webhooks", str(self)) - try: - return webhooks.get_list(endpoint=self.endpoint, project_key=self.key) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting webhooks of {str(self)}", catch_http_statuses=(HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) - return None + return webhooks.get_list(endpoint=self.endpoint, project_key=self.key) def links(self) -> Optional[list[dict[str, str]]]: """ @@ -980,8 +942,7 @@ def links(self) -> Optional[list[dict[str, str]]]: """ try: data = json.loads(self.get(api="project_links/search", params={"projectKey": self.key}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting links of {str(self)}", catch_http_statuses=(HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND)) + except exceptions.SonarException: return None link_list = None for link in data["links"]: @@ -1067,8 +1028,7 @@ def export(self, export_settings: types.ConfigSettings, settings_list: dict[str, try: hooks = webhooks.export(self.endpoint, self.key) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting webhooks of {str(self)}", catch_http_statuses=(HTTPStatus.FORBIDDEN,)) + except exceptions.SonarException: hooks = None if hooks is not None: json_data["webhooks"] = hooks @@ -1124,12 +1084,7 @@ def set_permissions(self, desired_permissions: types.ObjectJsonRepr) -> bool: :type desired_permissions: dict :return: Nothing """ - try: - self.permissions().set(desired_permissions) - return True - except (ConnectionError, RequestException) as e: - 
util.handle_error(e, f"setting permissions of {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - return False + self.permissions().set(desired_permissions) def set_links(self, desired_links: types.ObjectJsonRepr) -> bool: """Sets project links @@ -1140,15 +1095,11 @@ def set_links(self, desired_links: types.ObjectJsonRepr) -> bool: """ params = {"projectKey": self.key} ok = True - try: - for link in desired_links.get("links", {}): - if link.get("type", "") != "custom": - continue - params.update(link) - ok = ok and self.post("project_links/create", params=params).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting links of {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)) - return False + for link in desired_links.get("links", {}): + if link.get("type", "") != "custom": + continue + params.update(link) + ok = ok and self.post("project_links/create", params=params).ok return ok def set_quality_gate(self, quality_gate: str) -> bool: @@ -1159,17 +1110,8 @@ def set_quality_gate(self, quality_gate: str) -> bool: """ if quality_gate is None: return False - try: - _ = qualitygates.QualityGate.get_object(self.endpoint, quality_gate) - except exceptions.ObjectNotFound: - log.warning("Quality gate '%s' not found, can't set it for %s", quality_gate, str(self)) - return False - log.debug("Setting quality gate '%s' for %s", quality_gate, str(self)) - try: - return self.post("qualitygates/select", params={"projectKey": self.key, "gateName": quality_gate}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting permissions of {str(self)}", catch_all=True) - return False + _ = qualitygates.QualityGate.get_object(self.endpoint, quality_gate) + return self.post("qualitygates/select", params={"projectKey": self.key, "gateName": quality_gate}).ok def set_contains_ai_code(self, contains_ai_code: bool) -> bool: """Sets whether a project contains AI code @@ -1179,14 +1121,10 @@ 
def set_contains_ai_code(self, contains_ai_code: bool) -> bool: """ if self.endpoint.version() < (10, 7, 0) or self.endpoint.edition() == c.CE: return False - try: - api = "projects/set_contains_ai_code" - if self.endpoint.version() == (10, 7, 0): - api = "projects/set_ai_code_assurance" - return self.post(api, params={"project": self.key, "contains_ai_code": str(contains_ai_code).lower()}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting contains AI code of {str(self)}", catch_all=True) - return False + api = "projects/set_contains_ai_code" + if self.endpoint.version() == (10, 7, 0): + api = "projects/set_ai_code_assurance" + return self.post(api, params={"project": self.key, "contains_ai_code": str(contains_ai_code).lower()}).ok def set_quality_profile(self, language: str, quality_profile: str) -> bool: """Sets project quality profile for a given language @@ -1199,15 +1137,7 @@ def set_quality_profile(self, language: str, quality_profile: str) -> bool: log.warning("Quality profile '%s' in language '%s' does not exist, can't set it for %s", quality_profile, language, str(self)) return False log.debug("Setting quality profile '%s' of language '%s' for %s", quality_profile, language, str(self)) - try: - return self.post("qualityprofiles/add_project", params={"project": self.key, "qualityProfile": quality_profile, "language": language}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting quality profile of {str(self)}", catch_all=True) - errcode, msg = util.http_error_and_code(e) - if errcode == errcodes.OBJECT_NOT_FOUND: - Project.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.key, msg) - return False + return self.post("qualityprofiles/add_project", params={"project": self.key, "qualityProfile": quality_profile, "language": language}).ok def rename_main_branch(self, main_branch_name: str) -> bool: """Renames the project main branch @@ -1227,15 +1157,7 @@ def set_webhooks(self, 
webhook_data: types.ObjectJsonRepr) -> None: :param dict webhook_data: JSON describing the webhooks :return: Nothing """ - current_wh = self.webhooks() - current_wh_names = [wh.name for wh in current_wh.values()] - wh_map = {wh.name: k for k, wh in current_wh.items()} - # FIXME: Handle several webhooks with same name - for wh_name, wh in webhook_data.items(): - if wh_name in current_wh_names: - current_wh[wh_map[wh_name]].update(name=wh_name, **wh) - else: - webhooks.update(name=wh_name, endpoint=self.endpoint, project=self.key, **wh) + webhooks.import_config(self.endpoint, webhook_data, self.key) def set_settings(self, data: types.ObjectJsonRepr) -> None: """Sets project settings (webhooks, settings, new code period) @@ -1400,11 +1322,8 @@ def update(self, config: types.ObjectJsonRepr) -> None: except StopIteration: log.warning("No main branch defined in %s configuration", self) for branch_name, branch_data in branch_config.items(): - try: - branch = branches.Branch.get_object(self, branch_name) - branch.import_config(branch_data) - except exceptions.ObjectNotFound: - log.warning("Branch %s does not exist in %s, skipping update", branch_name, str(self)) + branch = branches.Branch.get_object(self, branch_name) + branch.import_config(branch_data) if "binding" in config: try: self.set_devops_binding(config["binding"]) @@ -1470,28 +1389,19 @@ def get_list(endpoint: pf.Platform, key_list: types.KeyList = None, threads: int def get_matching_list(endpoint: pf.Platform, pattern: str, threads: int = 8) -> dict[str, Project]: - """ + """Returns the list of projects whose keys are matching the pattern + :param Platform endpoint: Reference to the SonarQube platform :param str pattern: Regular expression to match project keys :return: the list of all projects matching the pattern - :rtype: dict{key: Project} """ - if not pattern or pattern == ".*": - return get_list(endpoint, threads=threads) + pattern = pattern or ".+" log.info("Listing projects matching regexp '%s'", 
pattern) matches = {k: v for k, v in get_list(endpoint, threads=threads).items() if re.match(rf"^{pattern}$", k)} log.info("%d project key matching regexp '%s'", len(matches), pattern) return matches -def __similar_keys(key1: str, key2: str, max_distance: int = 5) -> bool: - """Returns whether 2 project keys are similar""" - if key1 == key2: - return False - max_distance = min(len(key1) // 2, len(key2) // 2, max_distance) - return len(key2) >= 7 and (re.match(key2, key1)) or Levenshtein.distance(key1, key2, score_cutoff=6) <= max_distance - - def __audit_duplicates(projects_list: dict[str, Project], audit_settings: types.ConfigSettings) -> list[Problem]: """Audits for suspected duplicate projects""" if audit_settings.get(c.AUDIT_MODE_PARAM, "") == "housekeeper": @@ -1505,7 +1415,7 @@ def __audit_duplicates(projects_list: dict[str, Project], audit_settings: types. for key1, p in projects_list.items(): for key2 in projects_list: pair = " ".join(sorted([key1, key2])) - if __similar_keys(key1, key2, audit_settings.get("audit.projects.duplicates.maxDifferences", 4)) and pair not in pair_set: + if util.similar_strings(key1, key2, audit_settings.get("audit.projects.duplicates.maxDifferences", 4)) and pair not in pair_set: duplicates.append(Problem(get_rule(RuleId.PROJ_DUPLICATE), p, str(p), key2)) pair_set.add(pair) return duplicates @@ -1534,7 +1444,7 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings, **kwargs) :param Platform endpoint: reference to the SonarQube platform :param ConfigSettings audit_settings: Configuration of audit - :returns: list of problems found + :return: list of problems found """ if not audit_settings.get("audit.projects", True): log.info("Auditing projects is disabled, audit skipped...") @@ -1555,7 +1465,7 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings, **kwargs) try: problems += (proj_pbs := future.result(timeout=60)) write_q and write_q.put(proj_pbs) - except (TimeoutError, RequestException) as 
e: + except (TimeoutError, RequestException, exceptions.SonarException) as e: log.error(f"Exception {str(e)} when auditing {str(futures_map[future])}.") current += 1 lvl = log.INFO if current % 10 == 0 or total - current < 10 else log.DEBUG @@ -1573,7 +1483,7 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, **kwarg :param Platform endpoint: reference to the SonarQube platform :param ConfigSettings export_settings: Export parameters - :returns: list of projects settings + :return: list of projects settings """ write_q = kwargs.get("write_q", None) @@ -1594,7 +1504,7 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, **kwarg exp_json = future.result(timeout=60) write_q and write_q.put(exp_json) results[futures_map[future].key] = exp_json - except (TimeoutError, RequestException) as e: + except (TimeoutError, RequestException, exceptions.SonarException) as e: log.error(f"Exception {str(e)} when exporting {str(futures_map[future])}.") current += 1 lvl = log.INFO if current % 10 == 0 or total - current < 10 else log.DEBUG @@ -1604,12 +1514,12 @@ def export(endpoint: pf.Platform, export_settings: types.ConfigSettings, **kwarg return dict(sorted(results.items())) -def exists(key: str, endpoint: pf.Platform) -> bool: - """ - :param str key: project key to check +def exists(endpoint: pf.Platform, key: str) -> bool: + """Returns whether a project exists + :param Platform endpoint: reference to the SonarQube platform - :returns: whether the project exists - :rtype: bool + :param str key: project key to check + :return: whether the project exists """ try: Project.get_object(endpoint, key) @@ -1730,7 +1640,8 @@ def export_zips( return results -def import_zip(endpoint: pf.Platform, project_key: str, import_timeout: int = 30) -> tuple[str, str]: +def import_zip(endpoint: pf.Platform, project_key: str, import_timeout: int = 30) -> tuple[Project, str]: + """Imports a project zip file""" try: o_proj = 
Project.create(key=project_key, endpoint=endpoint, name=project_key) except exceptions.ObjectAlreadyExists: @@ -1775,15 +1686,18 @@ def import_zips(endpoint: pf.Platform, project_list: list[str], threads: int = 2 status = f"EXCEPTION {e}" statuses_count[status] = statuses_count[status] + 1 if status in statuses_count else 1 if o_proj is None: - o_proj = futures_map[future] - statuses[o_proj.key] = {} + proj_key = futures_map[future] + statuses[proj_key] = {"importStatus": status} else: - statuses[o_proj.key] = {"importDate": datetime.now().strftime("%Y-%m-%d %H:%M:%S")} - statuses[o_proj.key]["importProjectUrl"] = o_proj.url() - statuses[o_proj.key]["importStatus"] = status + proj_key = o_proj.key + statuses[proj_key] = { + "importDate": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "importProjectUrl": o_proj.url(), + "importStatus": status, + } i += 1 - log.info("%d/%d imports (%d%%) - Latest: %s - %s", i, nb_projects, int(i * 100 / nb_projects), o_proj.key, status) + log.info("%d/%d imports (%d%%) - Latest: %s - %s", i, nb_projects, int(i * 100 / nb_projects), proj_key, status) log.info("%s", ", ".join([f"{k}:{v}" for k, v in statuses_count.items()])) return statuses diff --git a/sonar/pull_requests.py b/sonar/pull_requests.py index 171194b16..cea448bdf 100644 --- a/sonar/pull_requests.py +++ b/sonar/pull_requests.py @@ -19,7 +19,7 @@ # """ - Abstraction of the SonarQube "pull request" concept +Abstraction of the SonarQube "pull request" concept """ @@ -106,6 +106,16 @@ def api_params(self, op: Optional[str] = None) -> types.ApiParams: ops = {c.READ: {"project": self.concerned_object.key, "pullRequest": self.key}} return ops[op] if op and op in ops else ops[c.READ] + def get_findings(self, filters: Optional[types.ApiParams] = None) -> dict[str, object]: + """Returns a PR list of findings + + :return: dict of Findings, with finding key as key + :rtype: dict{key: Finding} + """ + if not filters: + return self.concerned_object.get_findings(pr=self.key) + return 
self.get_issues(filters) | self.get_hotspots(filters) + def get_object(pull_request_key: str, project: object, data: types.ApiPayload = None) -> Optional[PullRequest]: """Returns a PR object from a PR key and a project""" diff --git a/sonar/qualitygates.py b/sonar/qualitygates.py index e536c9515..65cd1506f 100644 --- a/sonar/qualitygates.py +++ b/sonar/qualitygates.py @@ -19,16 +19,14 @@ # """ - Abstraction of the SonarQube "quality gate" concept +Abstraction of the SonarQube "quality gate" concept """ from __future__ import annotations from typing import Union -from http import HTTPStatus import json -from requests import RequestException import sonar.logging as log import sonar.sqobject as sq @@ -166,11 +164,7 @@ def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> QualityGate: @classmethod def create(cls, endpoint: pf.Platform, name: str) -> Union[QualityGate, None]: """Creates an empty quality gate""" - try: - endpoint.post(QualityGate.API[c.CREATE], params={"name": name}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating quality gate '{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(name, e.response.text) + endpoint.post(QualityGate.API[c.CREATE], params={"name": name}) return cls.get_object(endpoint, name) def __str__(self) -> str: @@ -195,24 +189,19 @@ def projects(self) -> dict[str, projects.Project]: """ :raises ObjectNotFound: If Quality gate not found :return: The list of projects using this quality gate - :rtype: dict {: } """ if self._projects is not None: return self._projects - if self.endpoint.is_sonarcloud(): - params = {"gateId": self.key, "ps": 500} - else: - params = {"gateName": self.name, "ps": 500} + params = {"ps": 500} | {"gateId": self.key} if self.endpoint.is_sonarcloud() else {"gateName": self.name} page, nb_pages = 1, 1 self._projects = {} while page <= nb_pages: params["p"] = page try: resp = self.get(QualityGate.API["get_projects"], 
params=params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting projects of {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: QualityGate.CACHE.pop(self) - raise exceptions.ObjectNotFound(self.name, f"{str(self)} not found") + raise data = json.loads(resp.text) for prj in data["results"]: key = prj["key"] if "key" in prj else prj["id"] @@ -272,8 +261,7 @@ def set_conditions(self, conditions_list: list[str]) -> bool: (params["metric"], params["op"], params["error"]) = _decode_condition(cond) try: ok = ok and self.post("qualitygates/create_condition", params=params).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"adding condition '{cond}' to {str(self)}", catch_all=True) + except exceptions.SonarException: ok = False self._conditions = None self.conditions() @@ -308,14 +296,14 @@ def set_as_default(self) -> bool: """ params = {"id": self.key} if self.endpoint.is_sonarcloud() else {"name": self.name} try: - r = self.post("qualitygates/set_as_default", params=params) + ok = self.post("qualitygates/set_as_default", params=params).ok # Turn off default for all other quality gates except the current one for qg in get_list(self.endpoint).values(): qg.is_default = qg.name == self.name - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting {str(self)} as default quality gate") + except exceptions.SonarException: return False - return r.ok + else: + return ok def update(self, **data) -> bool: """Updates a quality gate diff --git a/sonar/qualityprofiles.py b/sonar/qualityprofiles.py index d9b998c33..1ec490cef 100644 --- a/sonar/qualityprofiles.py +++ b/sonar/qualityprofiles.py @@ -19,15 +19,14 @@ # """Abstraction of the SonarQube Quality Profile concept""" + from __future__ import annotations from typing import Optional import json from datetime import datetime -from http import HTTPStatus import concurrent.futures from threading import 
Lock -from requests import RequestException import requests.utils import sonar.logging as log @@ -131,11 +130,7 @@ def create(cls, endpoint: pf.Platform, name: str, language: str) -> Optional[Qua log.error("Language '%s' does not exist, quality profile creation aborted") return None log.debug("Creating quality profile '%s' of language '%s'", name, language) - try: - endpoint.post(QualityProfile.API[c.CREATE], params={"name": name, "language": language}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating quality profile '{language}:{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(f"{language}:{name}", e.response.text) + endpoint.post(QualityProfile.API[c.CREATE], params={"name": name, "language": language}) return cls.read(endpoint=endpoint, name=name, language=language) @classmethod @@ -154,11 +149,7 @@ def clone(cls, endpoint: pf.Platform, name: str, language: str, original_qp_name raise exceptions.ObjectNotFound(f"{language}:{original_qp_name}", f"Quality profile {language}:{original_qp_name} not found") original_qp = l[0] log.debug("Found QP to clone: %s", str(original_qp)) - try: - endpoint.post("qualityprofiles/copy", params={"toName": name, "fromKey": original_qp.key}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"cloning {str(original_qp)} into name '{name}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(f"{language}:{name}", e.response.text) + endpoint.post("qualityprofiles/copy", params={"toName": name, "fromKey": original_qp.key}) return cls.read(endpoint=endpoint, name=name, language=language) @classmethod @@ -276,7 +267,7 @@ def rules(self, use_cache: bool = False) -> dict[str, rules.Rule]: # Assume nobody changed QP during execution return self._rules rule_key_list = rules.search_keys(self.endpoint, activation="true", qprofile=self.key, s="key", languages=self.language) - self._rules = {k: 
rules.get_object(self.endpoint, k) for k in rule_key_list} + self._rules = {k: rules.Rule.get_object(self.endpoint, k) for k in rule_key_list} return self._rules def activate_rule(self, rule_key: str, severity: Optional[str] = None, **params) -> bool: @@ -293,14 +284,13 @@ def activate_rule(self, rule_key: str, severity: Optional[str] = None, **params) if len(params) > 0: api_params["params"] = ";".join([f"{k}={v}" for k, v in params.items()]) try: - r = self.post("qualityprofiles/activate_rule", params=api_params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"activating rule {rule_key} in {str(self)}", catch_all=True) + ok = self.post("qualityprofiles/activate_rule", params=api_params).ok + except exceptions.SonarException: return False if self._rules is None: self._rules = {} - self._rules[rule_key] = rules.get_object(self.endpoint, rule_key) - return r.ok + self._rules[rule_key] = rules.Rule.get_object(self.endpoint, rule_key) + return ok def deactivate_rule(self, rule_key: str) -> bool: """Deactivates a rule in the quality profile @@ -311,11 +301,9 @@ def deactivate_rule(self, rule_key: str) -> bool: """ log.debug("Deactivating rule %s in %s", rule_key, str(self)) try: - r = self.post("qualityprofiles/deactivate_rule", params={"key": self.key, "rule": rule_key}) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"deactivating rule {rule_key} in {str(self)}", catch_all=True) + return self.post("qualityprofiles/deactivate_rule", params={"key": self.key, "rule": rule_key}).ok + except exceptions.SonarException: return False - return r.ok def deactivate_rules(self, ruleset: list[str]) -> bool: """Deactivates a list of rules in the quality profile @@ -324,7 +312,7 @@ def deactivate_rules(self, ruleset: list[str]) -> bool: """ ok = True for r_key in ruleset: - ok = ok and self.deactivate_rule(rule_key=r_key) + ok = self.deactivate_rule(rule_key=r_key) and ok self.rules(use_cache=False) return ok @@ -441,7 +429,7 
@@ def rule_impacts(self, rule_key: str, substitute_with_default: bool = True) -> d :return: The severities of the rule in the quality profile :rtype: dict[str, str] """ - return rules.get_object(self.endpoint, rule_key).impacts(self.key, substitute_with_default=substitute_with_default) + return rules.Rule.get_object(self.endpoint, rule_key).impacts(self.key, substitute_with_default=substitute_with_default) def __process_rules_diff(self, rule_set: dict[str:str]) -> dict[str:str]: diff_rules = {} @@ -800,8 +788,7 @@ def get_object(endpoint: pf.Platform, name: str, language: str) -> Optional[Qual :return: The quality profile object, of None if not found """ get_list(endpoint) - o = QualityProfile.CACHE.get(name, language, endpoint.local_url) - if not o: + if not (o := QualityProfile.CACHE.get(name, language, endpoint.local_url)): raise exceptions.ObjectNotFound(name, message=f"Quality Profile '{language}:{name}' not found") return o diff --git a/sonar/rules.py b/sonar/rules.py index e4e02bc6c..510ecc9c5 100644 --- a/sonar/rules.py +++ b/sonar/rules.py @@ -20,16 +20,15 @@ # """ - Abstraction of the SonarQube "rule" concept +Abstraction of the SonarQube "rule" concept """ + from __future__ import annotations import json import concurrent.futures from threading import Lock from typing import Optional -from http import HTTPStatus -from requests import RequestException import sonar.logging as log import sonar.sqobject as sq @@ -188,15 +187,16 @@ def __init__(self, endpoint: platform.Platform, key: str, data: types.ApiPayload @classmethod def get_object(cls, endpoint: platform.Platform, key: str) -> Rule: - """Returns a rule object from the cache or from the platform itself""" - o = Rule.CACHE.get(key, endpoint.local_url) - if o: + """Returns a rule object from it key, taken from the cache or from the platform itself + + :param Platform endpoint: The SonarQube reference + :param str key: The rule key + :return: The Rule object corresponding to the input rule key + :raises: 
ObjectNotFound if rule does not exist + """ + if o := Rule.CACHE.get(key, endpoint.local_url): return o - try: - r = endpoint.get(Rule.API[c.READ], params={"key": key, "actives": "true"}) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"getting rule {key}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(key=key, message=f"Rule key '{key}' does not exist") + r = endpoint.get(Rule.API[c.READ], params={"key": key, "actives": "true"}) return Rule(endpoint=endpoint, key=key, data=json.loads(r.text)["rule"]) @classmethod @@ -257,10 +257,9 @@ def refresh(self, use_cache: bool = True) -> bool: try: data = json.loads(self.get(Rule.API[c.READ], params={"key": self.key, "actives": "true"}).text) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"Reading {self}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) + except exceptions.ObjectNotFound: Rule.CACHE.pop(self) - raise exceptions.ObjectNotFound(key=self.key, message=f"{self} does not exist") + raise self.sq_json.update(data["rule"]) self.sq_json["actives"] = data["actives"].copy() return True @@ -410,8 +409,8 @@ def search_keys(endpoint: platform.Platform, **params) -> list[str]: data = json.loads(endpoint.get(Rule.API[c.SEARCH], params=new_params).text) nbr_pages = utilities.nbr_pages(data) rule_list += [r[Rule.SEARCH_KEY_FIELD] for r in data[Rule.SEARCH_RETURN_FIELD]] - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, "searching rules", catch_all=True) + except exceptions.SonarException: + pass return rule_list @@ -449,18 +448,6 @@ def get_list(endpoint: platform.Platform, use_cache: bool = True, **params) -> d return rule_list -def get_object(endpoint: platform.Platform, key: str) -> Optional[Rule]: - """Returns a Rule object from its key - :return: The Rule object corresponding to the input rule key, or None if not found - :param str key: The rule key - :rtype: Rule or None - """ - try: - return 
Rule.get_object(key=key, endpoint=endpoint) - except exceptions.ObjectNotFound: - return None - - def export(endpoint: platform.Platform, export_settings: types.ConfigSettings, **kwargs) -> types.ObjectJsonRepr: """Returns a JSON export of all rules""" log.info("Exporting rules") diff --git a/sonar/settings.py b/sonar/settings.py index c5ba0a730..d64deb289 100644 --- a/sonar/settings.py +++ b/sonar/settings.py @@ -18,15 +18,13 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # """ - Abstraction of the SonarQube setting concept +Abstraction of the SonarQube setting concept """ from __future__ import annotations import re import json from typing import Union, Optional -from http import HTTPStatus -from requests import HTTPError, RequestException import sonar.logging as log import sonar.platform as pf @@ -115,6 +113,7 @@ r"^sonar\.auth\..*\.organizations$", r"^sonar\.azureresourcemanager\.file\.identifier$", r"^sonar\.java\.jvmframeworkconfig\.file\.patterns$", + r"^sonar\.auth\.gitlab\.allowedGroups", ) VALID_SETTINGS = set() @@ -132,6 +131,7 @@ class Setting(sqobject.SqObject): c.LIST: "settings/list_definitions", "NEW_CODE_GET": "new_code_periods/show", "NEW_CODE_SET": "new_code_periods/set", + "MQR_MODE": "v2/clean-code-policy/mode", } def __init__(self, endpoint: pf.Platform, key: str, component: object = None, data: types.ApiPayload = None) -> None: @@ -154,19 +154,7 @@ def read(cls, key: str, endpoint: pf.Platform, component: object = None) -> Sett o = Setting.CACHE.get(key, component, endpoint.local_url) if o: return o - if key == NEW_CODE_PERIOD and not endpoint.is_sonarcloud(): - params = get_component_params(component, name="project") - data = json.loads(endpoint.get(Setting.API["NEW_CODE_GET"], params=params).text) - else: - if key == NEW_CODE_PERIOD: - key = "sonar.leak.period.type" - params = get_component_params(component) - params.update({"keys": key}) - data = json.loads(endpoint.get(Setting.API[c.GET], params=params, 
with_organization=(component is None)).text)["settings"] - if not endpoint.is_sonarcloud() and len(data) > 0: - data = data[0] - else: - data = {"inherited": True} + data = get_settings_data(endpoint, key, component) return Setting.load(key=key, endpoint=endpoint, data=data, component=component) @classmethod @@ -214,6 +202,8 @@ def reload(self, data: types.ApiPayload) -> None: self.multi_valued = data.get("multiValues", False) if self.key == NEW_CODE_PERIOD: self.value = new_code_to_string(data) + elif self.key == MQR_ENABLED: + self.value = data.get("mode", "MQR") != "STANDARD_EXPERIENCE" elif self.key == COMPONENT_VISIBILITY: self.value = data.get("visibility", None) elif self.key == "sonar.login.message": @@ -226,6 +216,10 @@ def reload(self, data: types.ApiPayload) -> None: self.value = util.DEFAULT self.__reload_inheritance(data) + def refresh(self) -> None: + """Reads the setting value on SonarQube""" + self.reload(get_settings_data(self.endpoint, self.key, self.component)) + def __hash__(self) -> int: """Returns object unique ID""" return hash((self.key, self.component.key if self.component else None, self.base_url())) @@ -241,10 +235,17 @@ def set(self, value: any) -> bool: log.debug("%s set to '%s'", str(self), str(value)) if not self.is_settable(): log.error("Setting '%s' does not seem to be a settable setting, trying to set anyway...", str(self)) - if value is None or value == "" or (self.key == "sonar.autodetect.ai.code" and value is True): - return self.endpoint.reset_setting(self.key) + return False + if value is None or value == "" or (self.key == "sonar.autodetect.ai.code" and value is True and self.endpoint.version() < (2025, 2, 0)): + return self.reset() + if self.key == MQR_ENABLED: + if ok := self.patch(Setting.API["MQR_MODE"], params={"mode": "STANDARD_EXPERIENCE" if not value else "MQR"}).ok: + self.value = value + return ok if self.key in (COMPONENT_VISIBILITY, PROJECT_DEFAULT_VISIBILITY): - return set_visibility(endpoint=self.endpoint, 
component=self.component, visibility=value) + if ok := set_visibility(endpoint=self.endpoint, component=self.component, visibility=value): + self.value = value + return ok # Hack: Up to 9.4 cobol settings are comma separated mono-valued, in 9.5+ they are multi-valued if self.endpoint.version() > (9, 4, 0) or not self.key.startswith("sonar.cobol"): @@ -256,38 +257,25 @@ def set(self, value: any) -> bool: return False log.debug("Setting %s to value '%s'", str(self), str(value)) - params = {"key": self.key, "component": self.component.key if self.component else None} - untransformed_value = value - if isinstance(value, list): - if isinstance(value[0], str): - params["values"] = value - else: - params["fieldValues"] = [json.dumps(v) for v in value] - elif isinstance(value, bool): - params["value"] = str(value).lower() - else: - pname = "values" if self.multi_valued else "value" - params[pname] = value + params = {"key": self.key, "component": self.component.key if self.component else None} | encode(self, value) try: - r = self.post(Setting.API[c.CREATE], params=params) - self.value = untransformed_value - return r.ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting setting '{self.key}' of {str(self.component)}", catch_all=True) + if ok := self.post(Setting.API[c.CREATE], params=params).ok: + self.value = value + except exceptions.SonarException: return False + else: + return ok def reset(self) -> bool: log.info("Resetting %s", str(self)) - params = {"keys": self.key} - if self.component: - params["component"] = self.component.key + params = {"keys": self.key} | {} if not self.component else {"component": self.component.key} try: - r = self.post("settings/reset", params=params) - self.value = None - return r.ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"resetting setting '{self.key}' of {str(self.component)}", catch_all=True) + ok = self.post("settings/reset", params=params).ok + self.refresh() + except 
exceptions.SonarException: return False + else: + return ok def to_json(self, list_as_csv: bool = True) -> types.ObjectJsonRepr: val = self.value @@ -300,7 +288,7 @@ def to_json(self, list_as_csv: bool = True) -> types.ObjectJsonRepr: break if val is None: val = "" - log.debug("JSON of %s = %s", self, {self.key: val}) + # log.debug("JSON of %s = %s", self, {self.key: val}) return {self.key: val} def definition(self) -> Optional[dict[str, str]]: @@ -443,7 +431,7 @@ def get_bulk( o = get_new_code_period(endpoint, component) settings_dict[o.key] = o VALID_SETTINGS.update(set(settings_dict.keys())) - VALID_SETTINGS.update({"sonar.scm.provider"}) + VALID_SETTINGS.update({"sonar.scm.provider", MQR_ENABLED}) return settings_dict @@ -479,17 +467,11 @@ def get_new_code_period(endpoint: pf.Platform, project_or_branch: object) -> Set def set_new_code_period(endpoint: pf.Platform, nc_type: str, nc_value: str, project_key: str = None, branch: str = None) -> bool: """Sets the new code period at global level or for a project""" log.debug("Setting new code period for project '%s' branch '%s' to value '%s = %s'", str(project_key), str(branch), str(nc_type), str(nc_value)) - try: - if endpoint.is_sonarcloud(): - ok = endpoint.post(Setting.API[c.CREATE], params={"key": "sonar.leak.period.type", "value": nc_type, "project": project_key}).ok - ok = ok and endpoint.post(Setting.API[c.CREATE], params={"key": "sonar.leak.period", "value": nc_value, "project": project_key}).ok - else: - ok = endpoint.post(Setting.API["NEW_CODE_SET"], params={"type": nc_type, "value": nc_value, "project": project_key, "branch": branch}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting new code period of {project_key}", catch_all=True) - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.BAD_REQUEST: - raise exceptions.UnsupportedOperation(f"Can't set project new code period: {e.response.text}") - return False + if endpoint.is_sonarcloud(): + ok = 
endpoint.post(Setting.API[c.CREATE], params={"key": "sonar.leak.period.type", "value": nc_type, "project": project_key}).ok + ok = ok and endpoint.post(Setting.API[c.CREATE], params={"key": "sonar.leak.period", "value": nc_value, "project": project_key}).ok + else: + ok = endpoint.post(Setting.API["NEW_CODE_SET"], params={"type": nc_type, "value": nc_value, "project": project_key, "branch": branch}).ok return ok @@ -511,23 +493,16 @@ def get_visibility(endpoint: pf.Platform, component: object) -> str: def set_visibility(endpoint: pf.Platform, visibility: str, component: object = None) -> bool: """Sets the platform global default visibility or component visibility""" - try: - if component: - log.debug("Setting setting '%s' of %s to value '%s'", COMPONENT_VISIBILITY, str(component), visibility) - return endpoint.post("projects/update_visibility", params={"project": component.key, "visibility": visibility}).ok - else: - log.debug("Setting setting '%s' to value '%s'", PROJECT_DEFAULT_VISIBILITY, str(visibility)) - return endpoint.post("projects/update_default_visibility", params={"projectVisibility": visibility}).ok - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting comp or global visibility of {str(component)}", catch_all=True) - if isinstance(e, HTTPError) and e.response.status_code == HTTPStatus.BAD_REQUEST: - raise exceptions.UnsupportedOperation(f"Can't set comp or global visibility of {str(component)}: {e.response.text}") - return False + if component: + log.debug("Setting setting '%s' of %s to value '%s'", COMPONENT_VISIBILITY, str(component), visibility) + return endpoint.post("projects/update_visibility", params={"project": component.key, "visibility": visibility}).ok + else: + log.debug("Setting setting '%s' to value '%s'", PROJECT_DEFAULT_VISIBILITY, str(visibility)) + return endpoint.post("projects/update_default_visibility", params={"projectVisibility": visibility}).ok def set_setting(endpoint: pf.Platform, key: str, value: 
any, component: object = None) -> bool: """Sets a setting to a particular value""" - try: log.debug("Setting %s with value %s (for component %s)", key, value, component) s = get_object(endpoint=endpoint, key=key, component=component) @@ -535,13 +510,11 @@ def set_setting(endpoint: pf.Platform, key: str, value: any, component: object = log.warning("Setting '%s' does not exist on target platform, it cannot be set", key) return False s.set(value) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"setting setting '{key}' of {str(component)}", catch_all=True) - return False - except exceptions.UnsupportedOperation as e: + except exceptions.SonarException as e: log.error("Setting '%s' cannot be set: %s", key, e.message) return False - return True + else: + return True def decode(setting_key: str, setting_value: any) -> any: @@ -562,6 +535,17 @@ def decode(setting_key: str, setting_value: any) -> any: return setting_value +def encode(setting: Setting, setting_value: any) -> dict[str, str]: + """Encodes the params to pass to api/settings/set according to setting value type""" + if isinstance(setting_value, list): + params = {"values": setting_value} if isinstance(setting_value[0], str) else {"fieldValues": [json.dumps(v) for v in setting_value]} + elif isinstance(setting_value, bool): + params = {"value": str(setting_value).lower()} + else: + params = {"values" if setting.multi_valued else "value": setting_value} + return params + + def reset_setting(endpoint: pf.Platform, setting_key: str, project: Optional[object] = None) -> bool: """Resets a setting to its default""" return get_object(endpoint=endpoint, key=setting_key, component=project).reset() @@ -575,3 +559,29 @@ def get_component_params(component: object, name: str = "component") -> types.Ap return {name: component.project.key, "branch": component.key} else: return {name: component.key} + + +def get_settings_data(endpoint: pf.Platform, key: str, component: Optional[object]) -> 
types.ApiPayload: + """Reads a setting data with different API depending on setting key + + :param Platform endpoint: The SonarQube Platform object + :param str key: The setting key + :param object component: The component (Project) concerned, optional + :return: The returned API data + """ + if key == NEW_CODE_PERIOD and not endpoint.is_sonarcloud(): + params = get_component_params(component, name="project") + data = json.loads(endpoint.get(Setting.API["NEW_CODE_GET"], params=params).text) + elif key == MQR_ENABLED: + data = json.loads(endpoint.get(Setting.API["MQR_MODE"]).text) + else: + if key == NEW_CODE_PERIOD: + key = "sonar.leak.period.type" + params = get_component_params(component) + params.update({"keys": key}) + data = json.loads(endpoint.get(Setting.API[c.GET], params=params, with_organization=(component is None)).text)["settings"] + if not endpoint.is_sonarcloud() and len(data) > 0: + data = data[0] + else: + data = {"inherited": True} + return data diff --git a/sonar/sif.py b/sonar/sif.py index 1d2c5aaec..a9f224e7d 100644 --- a/sonar/sif.py +++ b/sonar/sif.py @@ -19,7 +19,7 @@ # """ - Abstraction of the SonarQube System Info File (or Support Info File) concept +Abstraction of the SonarQube System Info File (or Support Info File) concept """ diff --git a/sonar/sif_node.py b/sonar/sif_node.py index 370d759e7..2e6a02984 100644 --- a/sonar/sif_node.py +++ b/sonar/sif_node.py @@ -19,7 +19,7 @@ # """ - Node audit +Node audit """ diff --git a/sonar/sqobject.py b/sonar/sqobject.py index 8978d17b9..bb28a9c80 100644 --- a/sonar/sqobject.py +++ b/sonar/sqobject.py @@ -19,7 +19,7 @@ # """ - Abstraction of the SonarQube general object concept +Abstraction of the SonarQube general object concept """ @@ -29,7 +29,6 @@ from http import HTTPStatus import concurrent.futures import requests -from requests import RequestException import sonar.logging as log from sonar.util import types, cache @@ -40,10 +39,12 @@ class SqObject(object): """Abstraction of Sonar objects""" 
- CACHE = cache.Cache + CACHE = cache.Cache() API = {c.SEARCH: None} def __init__(self, endpoint: object, key: str) -> None: + if not self.__class__.CACHE: + self.__class__.CACHE.set_class(self.__class__) self.key = key #: Object unique key (unique in its class) self.endpoint = endpoint #: Reference to the SonarQube platform self.concerned_object = None @@ -113,7 +114,11 @@ def get( Typically, Error 404 Not found may be expected sometimes so this can avoid logging an error for 404 :return: The request response """ - return self.endpoint.get(api=api, params=params, data=data, mute=mute, **kwargs) + try: + return self.endpoint.get(api=api, params=params, data=data, mute=mute, **kwargs) + except exceptions.ObjectNotFound: + self.__class__.CACHE.clear() + raise def post( self, @@ -131,7 +136,11 @@ def post( :type mute: tuple, optional :return: The request response """ - return self.endpoint.post(api=api, params=params, mute=mute, **kwargs) + try: + return self.endpoint.post(api=api, params=params, mute=mute, **kwargs) + except exceptions.ObjectNotFound: + self.__class__.CACHE.clear() + raise def patch( self, @@ -149,7 +158,11 @@ def patch( :type mute: tuple, optional :return: The request response """ - return self.endpoint.patch(api=api, params=params, mute=mute, **kwargs) + try: + return self.endpoint.patch(api=api, params=params, mute=mute, **kwargs) + except exceptions.ObjectNotFound: + self.__class__.CACHE.clear() + raise def delete(self) -> bool: """Deletes an object, returns whether the operation succeeded""" @@ -159,9 +172,6 @@ def delete(self) -> bool: if ok: log.info("Removing from %s cache", str(self.__class__.__name__)) self.__class__.CACHE.pop(self) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"deleting {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") except (AttributeError, KeyError): raise exceptions.UnsupportedOperation(f"Can't delete 
{self.__class__.__name__.lower()}s") return ok @@ -176,15 +186,14 @@ def set_tags(self, tags: list[str]) -> bool: tags = list(set(utilities.csv_to_list(tags))) log.info("Settings tags %s to %s", tags, str(self)) try: - r = self.post(self.__class__.API[c.SET_TAGS], params={**self.api_params(c.SET_TAGS), "tags": utilities.list_to_csv(tags)}) - if r.ok: + if ok := self.post(self.__class__.API[c.SET_TAGS], params={**self.api_params(c.SET_TAGS), "tags": utilities.list_to_csv(tags)}).ok: self._tags = sorted(tags) - except (ConnectionError, RequestException) as e: - utilities.handle_error(e, f"setting tags of {str(self)}", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) + except exceptions.SonarException: return False except (AttributeError, KeyError): raise exceptions.UnsupportedOperation(f"Can't set tags on {self.__class__.__name__.lower()}s") - return r.ok + else: + return ok def get_tags(self, **kwargs) -> list[str]: """Returns object tags""" @@ -199,7 +208,7 @@ def get_tags(self, **kwargs) -> list[str]: data = json.loads(self.get(api, params=self.get_tags_params()).text) self.sq_json.update(data["component"]) self._tags = self.sq_json["tags"] - except (ConnectionError, RequestException): + except exceptions.SonarException: self._tags = [] return self._tags @@ -211,7 +220,7 @@ def __get(endpoint: object, api: str, params: types.ApiParams) -> requests.Respo def __load(endpoint: object, object_class: any, data: types.ObjectJsonRepr) -> dict[str, object]: key_field = object_class.SEARCH_KEY_FIELD - if object_class.__name__ in ("Portfolio", "Group", "QualityProfile", "User", "Application", "Project", "Organization"): + if object_class.__name__ in ("Portfolio", "Group", "QualityProfile", "User", "Application", "Project", "Organization", "WebHook"): return {obj[key_field]: object_class.load(endpoint=endpoint, data=obj) for obj in data} elif object_class.__name__ in ("Rule"): return {obj[key_field]: object_class.load(endpoint=endpoint, key=obj[key_field], data=obj) for obj in 
data} diff --git a/sonar/tasks.py b/sonar/tasks.py index 910492356..aa1c9a312 100644 --- a/sonar/tasks.py +++ b/sonar/tasks.py @@ -19,18 +19,18 @@ # """Abstraction of the SonarQube background task concept""" + from typing import Optional import time import datetime import json import re -from requests import RequestException - import sonar.logging as log import sonar.sqobject as sq import sonar.platform as pf +from sonar import exceptions import sonar.utilities as util from sonar.audit.rules import get_rule, RuleId from sonar.audit.problem import Problem @@ -458,9 +458,8 @@ def search(endpoint: pf.Platform, only_current: bool = False, component_key: str try: data = json.loads(endpoint.get("ce/activity", params=params).text) return [Task(endpoint=endpoint, task_id=t["id"], data=t) for t in data["tasks"]] - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting background tasks of component {component_key}", catch_all=True) - return [] + except exceptions.SonarException: + return [] def search_all_last(endpoint: pf.Platform) -> list[Task]: diff --git a/sonar/tokens.py b/sonar/tokens.py index ccd95e03e..9218079e9 100644 --- a/sonar/tokens.py +++ b/sonar/tokens.py @@ -26,14 +26,11 @@ import json import datetime -from http import HTTPStatus -from requests import RequestException import sonar.logging as log import sonar.sqobject as sq import sonar.platform as pf import sonar.utilities as util -from sonar import exceptions from sonar.util import types, cache, constants as c from sonar.audit.problem import Problem from sonar.audit.rules import get_rule, RuleId @@ -69,11 +66,7 @@ def create(cls, endpoint: pf.Platform, login: str, name: str) -> UserToken: :param login: User for which the token must be created :param name: Token name """ - try: - data = json.loads(endpoint.post(UserToken.API[c.CREATE], {"name": name, "login": login}).text) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating token '{name}' for user 
'{login}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(name, e.response.text) + data = json.loads(endpoint.post(UserToken.API[c.CREATE], {"name": name, "login": login}).text) return UserToken(endpoint=endpoint, login=data["login"], json_data=data, name=name) def __str__(self) -> str: diff --git a/sonar/users.py b/sonar/users.py index 667b7dc53..2697fa685 100644 --- a/sonar/users.py +++ b/sonar/users.py @@ -19,6 +19,7 @@ # """Abstraction of the SonarQube User concept""" + from __future__ import annotations import concurrent.futures @@ -26,7 +27,6 @@ import datetime as dt import json -from http import HTTPStatus from requests import RequestException import sonar.logging as log @@ -125,11 +125,7 @@ def create(cls, endpoint: pf.Platform, login: str, name: str, is_local: bool = T params = {"login": login, "local": str(is_local).lower(), "name": name} if is_local: params["password"] = password if password else login - try: - endpoint.post(User.api_for(c.CREATE, endpoint), params=params) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"creating user '{login}'", catch_http_statuses=(HTTPStatus.BAD_REQUEST,)) - raise exceptions.ObjectAlreadyExists(login, util.sonar_error(e.response)) + endpoint.post(User.api_for(c.CREATE, endpoint), params=params) return cls.get_object(endpoint=endpoint, login=login) @classmethod @@ -142,13 +138,11 @@ def get_object(cls, endpoint: pf.Platform, login: str) -> User: :return: The user object :rtype: User """ - o = User.CACHE.get(login, endpoint.local_url) - if o: + if o := User.CACHE.get(login, endpoint.local_url): return o log.debug("Getting user '%s'", login) - for k, o in search(endpoint, params={"q": login}).items(): - if k == login: - return o + if user := next((o for k, o in search(endpoint, params={"q": login}).items() if k == login), None): + return user raise exceptions.ObjectNotFound(login, f"User '{login}' not found") @classmethod @@ -158,19 +152,15 @@ def 
get_object_by_id(cls, endpoint: pf.Platform, id: str) -> User: :param endpoint: Reference to the SonarQube platform :param id: User id :raises ObjectNotFound: if id not found - :raises UnsuppoertedOperation: If SonarQube version < 10.4 + :raises UnsupportedOperation: If SonarQube version < 10.4 :return: The user object :rtype: User """ if endpoint.version() < c.USER_API_V2_INTRO_VERSION: raise exceptions.UnsupportedOperation("Get by ID is an APIv2 features, staring from SonarQube 10.4") log.debug("Getting user id '%s'", id) - try: - data = json.loads(endpoint.get(f"/api/v2/users-management/users/{id}", mute=()).text) - return cls.load(endpoint, data) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"getting user id '{id}'", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(id, f"User id '{id}' not found") + data = json.loads(endpoint.get(f"/api/v2/users-management/users/{id}", mute=()).text) + return cls.load(endpoint, data) @classmethod def api_for(cls, op: str, endpoint: object) -> Optional[str]: @@ -357,11 +347,11 @@ def delete(self) -> bool: else: ok = self.post(api=User.API_V1[c.DELETE], params=self.api_params(c.DELETE)).ok if ok: - log.info("Removing from %s cache", str(self.__class__.__name__)) - self.__class__.CACHE.pop(self) - except (ConnectionError, RequestException) as e: - util.handle_error(e, f"deleting {str(self)}", catch_http_statuses=(HTTPStatus.NOT_FOUND,)) - raise exceptions.ObjectNotFound(self.key, f"{str(self)} not found") + log.info("Removing from %s cache", str(User.__name__)) + User.CACHE.pop(self) + except exceptions.ObjectNotFound: + User.CACHE.pop(self) + raise return ok def api_params(self, op: str = c.GET) -> types.ApiParams: @@ -525,7 +515,7 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings, **kwargs) for future in concurrent.futures.as_completed(futures): try: problems += future.result(timeout=60) - except (TimeoutError, RequestException) as e: + except 
(TimeoutError, RequestException, exceptions.SonarException) as e: log.error(f"Exception {str(e)} when auditing {str(futures_map[future])}.") "write_q" in kwargs and kwargs["write_q"].put(problems) log.info("--- Auditing users: END ---") diff --git a/sonar/util/cache.py b/sonar/util/cache.py index cc010a728..9ada5575d 100644 --- a/sonar/util/cache.py +++ b/sonar/util/cache.py @@ -18,22 +18,34 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Cache manager """ +"""Cache manager""" from typing import Optional +from sonar import logging as log class Cache(object): """Abstract cache implementation""" def __init__(self) -> None: + """Constructor""" self.objects = {} + self.object_class = None def __len__(self) -> int: """Returns size of cache""" return len(self.objects) def __str__(self) -> str: + """string repr of Cache""" + return "'undefined class' cache" if not self.object_class else f"'{self.object_class.__name__}' cache" + + def set_class(self, object_class: object) -> None: + """Defines the class the cache is for""" + self.object_class = object_class + + def contents(self) -> str: + """Returns the cache contents as a string""" return ", ".join([str(o) for o in self.objects.values()]) def put(self, obj: object) -> object: @@ -41,13 +53,19 @@ def put(self, obj: object) -> object: h = hash(obj) if h not in self.objects: self.objects[h] = obj + else: + log.debug("%s already in cache, can't be added again", obj) + # log.debug("PUT %s: %s", self, self.contents()) return self.objects[h] def get(self, *args) -> Optional[object]: + # log.debug("GET %s: %s", self, self.contents()) return self.objects.get(hash(args), None) def pop(self, obj: object) -> Optional[object]: - return self.objects.pop(hash(obj), None) + o = self.objects.pop(hash(obj), None) + log.debug("POP %s: %s", self, self.contents()) + return o def values(self) -> list[object]: return list(self.objects.values()) @@ -59,4 +77,6 @@ def items(self) -> dict[int, object]: return 
self.objects.items() def clear(self) -> None: + """Clears a cache""" + # log.info("Clearing %s", self) self.objects = {} diff --git a/sonar/util/cache_helper.py b/sonar/util/cache_helper.py index ed81b82ad..cb8147467 100644 --- a/sonar/util/cache_helper.py +++ b/sonar/util/cache_helper.py @@ -18,7 +18,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Cache manager """ +"""Cache manager""" from typing import Optional from sonar import logging as log diff --git a/sonar/util/constants.py b/sonar/util/constants.py index 6ee08ec2e..6a87b6a91 100644 --- a/sonar/util/constants.py +++ b/sonar/util/constants.py @@ -18,7 +18,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Sonar utility constants """ +"""Sonar utility constants""" CREATE = "CREATE" READ = "READ" diff --git a/sonar/util/sonar_cache.py b/sonar/util/sonar_cache.py index cd036e307..3b3fdfd10 100644 --- a/sonar/util/sonar_cache.py +++ b/sonar/util/sonar_cache.py @@ -19,6 +19,7 @@ # """Cache module""" + from typing import Optional from sonar import platform, projects, branches, pull_requests from sonar import applications, app_branches, portfolios diff --git a/sonar/util/types.py b/sonar/util/types.py index 4df92b610..25fad548c 100644 --- a/sonar/util/types.py +++ b/sonar/util/types.py @@ -18,7 +18,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Sonar Custom Types for type hints """ +"""Sonar Custom Types for type hints""" from typing import Union, Optional diff --git a/sonar/util/update_center.py b/sonar/util/update_center.py index 496555ad6..b3cf391ef 100644 --- a/sonar/util/update_center.py +++ b/sonar/util/update_center.py @@ -18,7 +18,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" Update center utilities """ +"""Update center utilities""" import os from typing import Optional diff --git a/sonar/utilities.py b/sonar/utilities.py index 31340ff3b..32c5d0e6f 100644 --- a/sonar/utilities.py +++ b/sonar/utilities.py @@ -19,9 +19,10 @@ # """ - Utilities for sonar-tools +Utilities for sonar-tools """ + from typing import TextIO, Union, Optional from http import HTTPStatus import sys @@ -35,6 +36,8 @@ from copy import deepcopy import requests +import Levenshtein + import sonar.logging as log from sonar import version, errcodes from sonar.util import types, cache_helper @@ -467,26 +470,16 @@ def open_file(file: str = None, mode: str = "w") -> TextIO: def search_by_name(endpoint: object, name: str, api: str, returned_field: str, extra_params: dict[str, str] = None) -> Union[dict[str, str], None]: """Searches a object by name""" - params = {"q": name} - if extra_params is not None: - params.update(extra_params) + params = {"q": name} | (extra_params or {}) data = json.loads(endpoint.get(api, params=params).text) - for d in data[returned_field]: - if d["name"] == name: - return d - return None + return next((d for d in data[returned_field] if d["name"] == name), None) def search_by_key(endpoint: object, key: str, api: str, returned_field: str, extra_params: Optional[dict[str, str]] = None) -> types.ApiPayload: """Search an object by its key""" - params = {"q": key} - if extra_params is not None: - params.update(extra_params) + params = {"q": key} | (extra_params or {}) data = json.loads(endpoint.get(api, params=params).text) - for d in data[returned_field]: - if d["key"] == key: - return d - return None + return next((d for d in data[returned_field] if d["key"] == key), None) def sonar_error(response: requests.models.Response) -> str: @@ -822,3 +815,11 @@ def flatten(original_dict: dict[str, any]) -> dict[str, any]: else: flat_dict[k] = v return flat_dict + + +def similar_strings(key1: str, key2: str, max_distance: int = 5) -> bool: + """Returns 
whether 2 project keys are similar, but not equal""" + if key1 == key2: + return False + max_distance = min(len(key1) // 2, len(key2) // 2, max_distance) + return (len(key2) >= 7 and (re.match(key2, key1))) or Levenshtein.distance(key1, key2, score_cutoff=6) <= max_distance diff --git a/sonar/version.py b/sonar/version.py index 3e65b485e..799a03022 100644 --- a/sonar/version.py +++ b/sonar/version.py @@ -20,9 +20,9 @@ """ - sonar-tools project version +sonar-tools project version """ PACKAGE_VERSION = "3.17" -MIGRATION_TOOL_VERSION = "0.6-snapshot" +MIGRATION_TOOL_VERSION = "0.7" diff --git a/sonar/webhooks.py b/sonar/webhooks.py index 8d806ad79..0e47e4fdc 100644 --- a/sonar/webhooks.py +++ b/sonar/webhooks.py @@ -19,10 +19,14 @@ # """Abstraction of the SonarQube webhook concept""" + +from __future__ import annotations +from typing import Optional + import json import sonar.logging as log -from sonar import platform as pf +from sonar import platform as pf, exceptions from sonar.util import types, cache, constants as c import sonar.utilities as util import sonar.sqobject as sq @@ -38,27 +42,65 @@ class WebHook(sq.SqObject): """ CACHE = cache.Cache() - API = {c.CREATE: "webhooks/create", c.UPDATE: "webhooks/update", c.LIST: "webhooks/list"} + API = {c.CREATE: "webhooks/create", c.READ: "webhooks/list", c.UPDATE: "webhooks/update", c.LIST: "webhooks/list", c.DELETE: "webhooks/delete"} SEARCH_KEY_FIELD = "key" SEARCH_RETURN_FIELD = "webhooks" - def __init__( - self, endpoint: pf.Platform, name: str, url: str = None, secret: str = None, project: str = None, data: types.ApiPayload = None - ) -> None: + def __init__(self, endpoint: pf.Platform, name: str, url: str, secret: Optional[str] = None, project: Optional[str] = None) -> None: """Constructor""" super().__init__(endpoint=endpoint, key=name) - if data is None: - params = util.remove_nones({"name": name, "url": url, "secret": secret, "project": project}) - data = json.loads(self.post(WebHook.API[c.CREATE], 
params=params).text)["webhook"] - self.sq_json = data - self.name = data["name"] #: Webhook name - self.key = data["key"] #: Webhook key - self.webhook_url = data["url"] #: Webhook URL - self.secret = data.get("secret", None) #: Webhook secret + self.name = name #: Webhook name + self.webhook_url = url #: Webhook URL + self.secret = secret #: Webhook secret + self.project = project #: Webhook project, optional + self.last_delivery = None #: Webhook last delivery timestamp self.project = project #: Webhook project if project specific webhook - self.last_delivery = data.get("latestDelivery", None) WebHook.CACHE.put(self) + @classmethod + def create(cls, endpoint: pf.Platform, name: str, url: str, secret: Optional[str] = None, project: Optional[str] = None) -> WebHook: + """Creates a WebHook object in SonarQube + + :param Platform endpoint: Reference to the SonarQube platform + :param str name: Webhook name + :param str url: Webhook URL + :param str secret: Webhook secret, optional + :param str project: Webhook project key, optional + :return: The created WebHook + """ + log.info("Creating webhook name %s, url %s project %s", name, url, str(project)) + params = util.remove_nones({"name": name, "url": url, "secret": secret, "project": project}) + endpoint.post(WebHook.API[c.CREATE], params=params) + o = cls(endpoint, name=name, url=url, secret=secret, project=project) + o.refresh() + return o + + @classmethod + def load(cls, endpoint: pf.Platform, data: types.ApiPayload) -> WebHook: + """Creates and loads a local WebHook object with data payload received from API + + :param Platform endpoint: Reference to the SonarQube platform + :param ApiPayload data: The webhook data received from the API + :return: The created WebHook + """ + name, project = data["name"], data.get("project", None) + if (o := WebHook.CACHE.get(name, project, endpoint.local_url)) is None: + o = WebHook(endpoint, name, data["url"], data.get("secret", None), project) + o.reload(data) + return o + +
@classmethod + def get_object(cls, endpoint: pf.Platform, name: str, project_key: Optional[str] = None) -> WebHook: + """Gets a WebHook object from its name and an eventual project key""" + log.debug("Getting webhook name %s project key %s", name, str(project_key)) + if o := WebHook.CACHE.get(name, project_key, endpoint.local_url): + return o + try: + whs = list(get_list(endpoint, project_key).values()) + return next(wh for wh in whs if wh.name == name) + except StopIteration as e: + raise exceptions.ObjectNotFound(project_key, f"Webhook '{name}' of project '{project_key}' not found") from e + def __str__(self) -> str: return f"webhook '{self.name}'" @@ -67,21 +109,43 @@ def __hash__(self) -> int: Returns an object unique Id :meta private: """ - return hash((self.name, self.project if self.project else "", self.base_url())) + return hash((self.name, self.project, self.endpoint.local_url)) + + def refresh(self) -> None: + """Reads the Webhook data on the SonarQube platform and updates the local object""" + data = json.loads(self.get(WebHook.API[c.LIST], params=None if not self.project else {"project": self.project}).text) + wh_data = next((wh for wh in data["webhooks"] if wh["name"] == self.name), None) + if wh_data is None: + wh_name = str(self) + name = self.name + WebHook.CACHE.pop(self) + raise exceptions.ObjectNotFound(name, f"{wh_name} not found") + self.reload(wh_data) + + def reload(self, data: types.ApiPayload) -> None: + """Reloads a WebHook from the payload gotten from SonarQube""" + log.debug("Loading %s with %s", str(self), str(data)) + self.sq_json = self.sq_json or {} | data + self.name = data["name"] + self.key = data["key"] + self.webhook_url = data["url"] + self.secret = data.get("secret", None) or self.secret + self.last_delivery = data.get("latestDelivery", None) def url(self) -> str: """Returns the object permalink""" return f"{self.base_url(local=False)}/admin/webhooks" - def update(self, **kwargs) -> None: + def update(self, **kwargs: str) -> 
bool: """Updates a webhook with new properties (name, url, secret) :param kwargs: dict - "url", "name", "secret" are the looked up keys - :return: Nothing + :return: Whether the operation succeeded """ - params = util.remove_nones(kwargs) - params.update({"webhook": self.key}) - self.post(WebHook.API[c.UPDATE], params=params) + params = {"webhook": self.key, "name": self.name, "url": self.webhook_url} | util.remove_nones(kwargs) + ok = self.post(WebHook.API[c.UPDATE], params=params).ok + self.refresh() + return ok def audit(self) -> list[problem.Problem]: """ @@ -101,13 +165,17 @@ def to_json(self, full: bool = False) -> dict[str, any]: """ return util.filter_export(self.sq_json, _IMPORTABLE_PROPERTIES, full) + def api_params(self, op: str) -> types.ApiParams: + """Returns the std api params to pass for a given webhook""" + ops = {c.READ: {"webhook": self.key}} + return ops[op] if op and op in ops else ops[c.READ] + def search(endpoint: pf.Platform, params: types.ApiParams = None) -> dict[str, WebHook]: """Searches webhooks :param ApiParams params: Filters to narrow down the search, can only be "project" :return: List of webhooks - :rtype: dict{: } """ return sq.search_objects(endpoint=endpoint, object_class=WebHook, params=params) @@ -131,29 +199,19 @@ def export(endpoint: pf.Platform, project_key: str = None, full: bool = False) - return json_data if len(json_data) > 0 else None -def create(endpoint: pf.Platform, name: str, url: str, secret: str = None, project: str = None) -> WebHook: - """Creates a webhook, global if project key is None, othewise project specific""" - return WebHook(endpoint=endpoint, name=name, url=url, secret=secret, project=project) - - -def update(endpoint: pf.Platform, name: str, **kwargs) -> None: - """Updates a webhook with data in kwargs""" - project_key = kwargs.pop("project", None) - get_list(endpoint, project_key) - o = WebHook.CACHE.get(name, project_key, endpoint.local_url) - if not o: - create(endpoint, name, kwargs["url"], 
kwargs["secret"], project=project_key) - else: - get_object(endpoint, name, project_key=project_key, data=kwargs).update(**kwargs) - +def import_config(endpoint: pf.Platform, data: types.ObjectJsonRepr, project_key: Optional[str] = None) -> None: + """Imports a set of webhooks defined from a JSON description""" + log.debug("Importing webhooks %s for %s", str(data), str(project_key)) + current_wh = get_list(endpoint, project_key=project_key) + existing_webhooks = {wh.name: k for k, wh in current_wh.items()} -def get_object(endpoint: pf.Platform, name: str, project_key: str = None, data: types.ApiPayload = None) -> WebHook: - """Gets a WebHook object from name a project key""" - log.debug("Getting webhook name %s project key %s data = %s", name, str(project_key), str(data)) - o = WebHook.CACHE.get(name, project_key, endpoint.local_url) - if not o: - o = WebHook(endpoint=endpoint, name=name, project=project_key, data=data) - return o + # FIXME: Handle several webhooks with same name + for wh_name, wh_data in data.items(): + if wh_name in existing_webhooks: + current_wh[existing_webhooks[wh_name]].update(name=wh_name, **wh_data) + else: + hook = WebHook.create(endpoint=endpoint, name=wh_name, url=wh_data.get("url", "https://to.be.defined"), project=project_key) + hook.update(**wh_data) def audit(endpoint: pf.Platform) -> list[problem.Problem]: diff --git a/test/.sonar-audit.properties b/test/.sonar-audit.properties deleted file mode 100644 index 0baa454d7..000000000 --- a/test/.sonar-audit.properties +++ /dev/null @@ -1,304 +0,0 @@ -# -# sonar-tools -# Copyright (C) 2019-2025 Olivier Korach -# mailto:olivier.korach AT gmail DOT com -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 3 of the License, or (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program; if not, write to the Free Software Foundation, -# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -# -#======================== SELECT AUDIT SCOPE CONFIGURATION ======================= - -# yes/no -audit.globalSettings = yes -audit.projects = yes -audit.qualityGates = yes -audit.qualityProfiles = yes -audit.users = yes -audit.groups = yes -# Portfolios and application audit (for DE (Apps) and EE, DCE (Apps, and Portfolios)) -audit.portfolios = yes -audit.applications = yes - -# Logs audit -audit.logs = yes - -# Plugins audit -audit.plugins = yes -audit.plugins.whitelist = dependencycheck, findbugs, pmd, mutationanalysis, perl - -#===================== GLOBAL SETTINGS AUDIT CONFIGURATION ==================== - -#----------------------- GLOBAL SETTINGS CONFIGURATION ------------------------ - -# Audit (and warn) for default project public visibility -audit.globalSettings.defaultProjectVisibility = private - -# Below settings audit structure is: -# For allowed range settings: -# audit.globalSettings.range. = , , , , -# For allowed value settings: -# audit.globalSetting.value. = , , , -# To check if a setting is set: -# audit.globalSettings.isSet. 
= , , - -# Audit (and warn) if cross project duplication is enabled -audit.globalSettings.value.1 = sonar.cpd.cross_project, false, HIGH, PERFORMANCE - -# Audit (and warn) if force authentication is disabled -audit.globalSettings.value.2 = sonar.forceAuthentication, true, HIGH, SECURITY - -# Audit (and warn) if server base URL is not set -audit.globalSettings.isSet.1 = sonar.core.serverBaseURL, HIGH, OPERATIONS - -#----------------------- DB CLEANER AUDIT CONFIGURATION ----------------------- - -# Audit (and warn) for suspicious DB cleaner settings -audit.globalSettings.dbcleaner = yes - -# Audit DB Cleaner min/max time before purging issues -audit.globalSettings.range.1 = sonar.dbcleaner.daysBeforeDeletingClosedIssues, 10, 60, MEDIUM, PERFORMANCE - -# Audit DB Cleaner min/max time before only keeping one analysis snapshot per day -audit.globalSettings.range.2 = sonar.dbcleaner.hoursBeforeKeepingOnlyOneSnapshotByDay, 12, 240, MEDIUM, PERFORMANCE - -# Audit DB Cleaner min/max time before only keeping one analysis snapshot per week -audit.globalSettings.range.3 = sonar.dbcleaner.weeksBeforeKeepingOnlyOneSnapshotByWeek, 2, 12, MEDIUM, PERFORMANCE - -# Audit DB Cleaner min/max time before only keeping one analysis snapshot per month -audit.globalSettings.range.4 = sonar.dbcleaner.weeksBeforeKeepingOnlyOneSnapshotByMonth, 26, 104, MEDIUM, PERFORMANCE - -# Audit DB Cleaner min/max time before deleting all snapshots -audit.globalSettings.range.5 = sonar.dbcleaner.weeksBeforeDeletingAllSnapshots, 104, 260, MEDIUM, PERFORMANCE - -# Audit DB Cleaner min/max time before deleting inactive branches (and PRs) -audit.globalSettings.range.6 = sonar.dbcleaner.daysBeforeDeletingInactiveBranches, 10, 60, MEDIUM, PERFORMANCE -audit.globalSettings.range.7 = sonar.dbcleaner.daysBeforeDeletingInactiveBranchesAndPRs, 10, 60, MEDIUM, PERFORMANCE - - -#------------------- TECH DEBT SETTINGS AUDIT CONFIGURATION ------------------- - -# Audit for suspicious technical debt thresholds, listed 
further below -audit.globalSettings.technicalDebt = yes - -# Audit if dev cost of 1 line is not within expected range (affects Tech Debt ratio and Maintainability rating metrics) -audit.globalSettings.range.7 = sonar.technicalDebt.developmentCost, 20, 30, MEDIUM, CONFIGURATION - -# Audit if maintainaibility rating thresholds are not within normal ranges -audit.globalSettings.maintainabilityRating.A.range.1 = 0.03, 0.05, MEDIUM, CONFIGURATION -audit.globalSettings.maintainabilityRating.A.range.2 = 0.02, 0.07, HIGH, CONFIGURATION -audit.globalSettings.maintainabilityRating.B.range.1 = 0.07, 0.10, MEDIUM, CONFIGURATION -audit.globalSettings.maintainabilityRating.B.range.2 = 0.05, 0.15, HIGH, CONFIGURATION -audit.globalSettings.maintainabilityRating.C.range.1 = 0.15, 0.20, MEDIUM, CONFIGURATION -audit.globalSettings.maintainabilityRating.C.range.2 = 0.10, 0.25, HIGH, CONFIGURATION -audit.globalSettings.maintainabilityRating.D.range.1 = 0.40, 0.50, MEDIUM, CONFIGURATION -audit.globalSettings.maintainabilityRating.D.range.2 = 0.30, 0.60, HIGH, CONFIGURATION - -# Min max heap allocated to the web process -audit.web.heapMin = 1024 -audit.web.heapMax = 2048 - -#======================= PERMISSIONS AUDIT CONFIGURATION ====================== - -#----------------------------- GLOBAL PERMISSIONS ----------------------------- -# Audit (and warn) for suspicious global permissions -audit.globalSettings.permissions = yes - -# Max allowed number of users/groups with global admin permission -audit.globalSettings.permissions.maxAdminUsers = 3 -audit.globalSettings.permissions.maxAdminGroups = 2 - -# Max allowed number of users/groups with quality gate admin permission -audit.globalSettings.permissions.maxGateAdminUsers = 3 -audit.globalSettings.permissions.maxGateAdminGroups = 2 - -# Max allowed number of users/groups with quality profile admin permission -audit.globalSettings.permissions.maxProfileAdminUsers = 3 -audit.globalSettings.permissions.maxProfileAdminGroups = 2 - -# Max 
allowed number of users/groups with execute analysis permission -audit.globalSettings.permissions.maxScanUsers = 3 -audit.globalSettings.permissions.maxScanGroups = 2 - -# Max allowed number of users/groups with create project permission -audit.globalSettings.permissions.maxCreateProjectUsers = 3 -audit.globalSettings.permissions.maxCreateProjectGroups = 3 - -#----------------------------- PROJECT/APP/PORTFOLIO/QP/QG MAX PERMISSIONS ---------------------------- -audit.permissions.maxUsers = 5 -audit.permissions.maxGroups = 5 - -#----------------------------- PROJECT/APP/PORTFOLIO MAX ADMIN PERMISSIONS ---------------------------- -audit.permissions.maxAdminUsers = 2 -audit.permissions.maxAdminGroups = 2 - -#----------------------------- PROJECT PERMISSIONS ---------------------------- -# Project permission audit -# Max sure there are not too many users/groups with given project permissions - -audit.projects.permissions.maxScanGroups = 1 -audit.projects.permissions.maxIssueAdminGroups = 2 -audit.projects.permissions.maxHotspotAdminGroups = 2 -# audit.projects.permissions.anyone = yes - -#========================= PROJECT AUDIT CONFIGURATION ======================== - -# Audit and warn) for projects likely to be duplicates -# Duplicate projects are detected from project keys that are similar -audit.projects.duplicates = yes - -# Audit and warn) for projects that have been provisioned but never analyzed -audit.projects.neverAnalyzed = yes - -# Audit (and warn) if project visibility is public -audit.projects.visibility = yes - -# Audit (and warn) for suspicious projects permissions -audit.projects.permissions = yes - -# Audit (and warn) for suspicious projects exclusions -audit.projects.exclusions = yes -# In the below: -# - All * . and ? 
symbols that relate to the SonarQube exclusion pattern should be escaped with \\ -# - All symbols that have a special meaning for regex pattern matching shall not be escaped -audit.projects.suspiciousExclusionsPatterns = \\*\\*/[^\/]+/\\*\\*, \\*\\*/\\*[\.\w]*, \\*\\*/\\*, \\*\\*/\\*\\.(java|jav|cs|csx|py|php|js|ts|sql|html|css|cpp|c|h|hpp)\\*? -audit.projects.suspiciousExclusionsExceptions = \\*\\*/(__pycache__|libs|lib|vendor|node_modules)/\\*\\* - -# Audit (and warn) for projects whose last analysis date is older than maxLastAnalysisAge -# Set property to 0 to turn off the check -audit.projects.maxLastAnalysisAge = 180 - -# Audit branches for zero LoC and last analysis date -audit.projects.branches = yes - -# Audits for branches whose last analysis is older than a given number of days -# This parameter is only considered for branches not marked as "keep when inactive" -# Set property to 0 to turn off the check -audit.projects.branches.maxLastAnalysisAge = 30 - -# Audits for PR whose last analysis is older than a given number of days -# Set property to 0 to turn off the check -audit.projects.pullRequests.maxLastAnalysisAge = 30 - -# Audits duplicate projects bound to same DevOps platform repo -audit.projects.bindings = yes - -# Audits that projects with bindings have valid bindings -# Off by default since each project binding validation takes 1 to 3 seconds by project (with ALM bindings) -# which can be too time consuming for platform with large number of bound projects -audit.projects.bindings.validation = no - -# Audits projects for disabled SCM -audit.project.scm.disabled = yes - -# Audits projects with suspiciously high proportion of utility LoCs (XML, JSON...) 
-audit.projects.utilityLocs = yes - -# Audits projects for analysis warnings -audit.projects.analysisWarnings = yes - -# Audits projects with last background task failed -audit.projects.failedTasks = yes - -# Audits projects analyzed with too old scanner version, 2 years by default -audit.projects.scannerMaxAge = 730 - -#====================== QUALITY GATES AUDIT CONFIGURATION ===================== - -# Audit that there are not too many quality gates, this defeats company common governance -audit.qualitygates.maxNumber = 5 - -# Audit that quality gates don't have too many criterias, it's too complex and -# may prevent passing QG because of incorrect QG criteria -audit.qualitygates.maxConditions = 8 - -# Audits that QGs only use the meaningful metrics (those that make sense in a QG) -audit.qualitygates.allowedMetrics = new_reliability_rating, new_security_rating, new_maintainability_rating, new_bugs, new_vulnerabilities, new_security_hotspots, new_security_hotspots_reviewed, new_blocker_violations, new_critical_violations, new_major_violations, new_duplicated_lines_density, reliability_rating, security_rating - -#------------------------ AUDIT OF METRICS ON NEW CODE ------------------------ - -# Audit that reliability, security, maintainability, hotspot review ratings, if used, are A -# if rating is used as a QG criteria -audit.qualitygates.new_reliability_rating.value = 1 -audit.qualitygates.new_security_rating.value = 1 -audit.qualitygates.new_hotspot_rating.value = 1 -audit.qualitygates.new_maintainability_rating.value = 1 - -# Audit that coverage on new code, if used, is between 20% and 90% -audit.qualitygates.new_coverage.range = 20,90 - -# Audit that new bugs, vulnerabilities, unreviewed hotspots metric, if used, is 0 -audit.qualitygates.new_bugs.value = 0 -audit.qualitygates.new_vulnerabilities.value = 0 -audit.qualitygates.new_security_hotspots.value = 0 - -# Audit that % of hotspots review on new code, if used, is 100% 
-audit.qualitygates.new_security_hotspots_reviewed.value = 100 - -# Audit that new blockers/critical/major issues metric, if used, is 0 -audit.qualitygates.new_blocker_violations.value = 0 -audit.qualitygates.new_critical_violations.value = 0 -audit.qualitygates.new_major_violations.value = 0 - -# Audit that duplication on new code, if used, is between 1% and 5% -audit.qualitygates.new_duplicated_lines_density.range = 1, 5 - -#---------------------- AUDIT OF METRICS ON OVERALL CODE ---------------------- - -# Audit that reliability/security/hotspot rating on overall code, if used, is not too strict -audit.qualitygates.reliability_rating.range = 4, 4 -audit.qualitygates.security_rating.range = 3, 4 -audit.qualitygates.hotspot_rating.range = 4, 4 - - -#===================== QUALITY PROFILES AUDIT CONFIGURATION =================== - -# Audit QP not changed since a given number of days -audit.qualityProfiles.maxLastChangeAge = 180 - -# Audit quality profiles with too few rules (0.5 = 50% of all rules) -audit.qualityProfiles.minNumberOfRules = 0.5 - -# Audit quality profiles not used for a given number of days -audit.qualityProfiles.maxUnusedAge = 60 - -# Audit quality profiles for usage of deprecated rules -audit.qualityProfiles.checkDeprecatedRules = yes - -#========================= USERS AND GROUPS AUDIT CONFIGURATION ========================= - -# Audit for users that have not logged in for a given number of days -audit.users.maxLoginAge = 180 - -# Audit for days after which a token should be revoked (and potentially renewed) -audit.tokens.maxAge = 90 - -# Audit for days after which an unused token should be revoked (and potentially renewed) -audit.tokens.maxUnusedAge = 30 - -# Comma separated list of SonarQube users whose tokens are not considered for expiration -audit.tokens.neverExpire = - -# Audit (and warn) for empty groups -audit.groups.empty = yes - -#========================= PORTFOLIOS AND APPS AUDIT CONFIGURATION ======================== - - -# Audit (and 
warn) for portfolios composed of 0 or/and 1 projects -audit.portfolios.empty = yes -audit.portfolios.singleton = yes - -# Audit (and warn) for applications composed of 0 or/and 1 projects -audit.applications.empty = yes -audit.applications.singleton = yes diff --git a/test/integration/it-tools.sh b/test/integration/it-tools.sh index 4084a1b92..a483b68a3 100644 --- a/test/integration/it-tools.sh +++ b/test/integration/it-tools.sh @@ -22,9 +22,9 @@ #set -euo pipefail REPO_ROOT="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; cd ../.. ; pwd -P )" -TMP="$REPO_ROOT/tmp" -IT_LOG_FILE="$TMP/it.log" -mkdir -p "$TMP" +TMP="${REPO_ROOT}/tmp" +IT_LOG_FILE="${TMP}/it.log" +mkdir -p "${TMP}" YELLOW=$(tput setaf 3) RED=$(tput setaf 1) @@ -36,54 +36,54 @@ function logmsg { } function run_test { - file=$1; shift + file=${1}; shift announced_args=$(get_announced_args $@) - announce_test "$announced_args -f $file" - if [ "$1" != "docker" ]; then - file="$REPO_ROOT/tmp/$file" + announce_test "${announced_args} -f ${file}" + if [[ "${1}" != "docker" ]]; then + file="${REPO_ROOT}/tmp/${file}" fi - if [ "$SONAR_HOST_URL" == "$SONAR_HOST_URL_SONARCLOUD" ]; then - "$@" -o okorach -f "$file" 2>>$IT_LOG_FILE + if [[ "${SONAR_HOST_URL}" == "${SONAR_HOST_URL}_SONARCLOUD" ]]; then + "$@" -o okorach -f "${file}" 2>>$IT_LOG_FILE else - # echo "$@" -f "$file" - "$@" -f "$file" 2>>$IT_LOG_FILE + # echo "$@" -f "${file}" + "$@" -f "${file}" 2>>$IT_LOG_FILE fi - test_passed_if_file_not_empty "$file" + test_passed_if_file_not_empty "${file}" } function get_announced_args { skipnext="false" announced_args="" for arg in $@; do - if [ "$arg" = "-t" ] || [ "$arg" = "-u" ]; then + if [[ "${arg}" = "-t" ]] || [[ "${arg}" = "-u" ]]; then skipnext="true" - elif [ "$skipnext" = "true" ]; then + elif [[ "${skipnext}" = "true" ]]; then skipnext="false" else - announced_args="$announced_args $arg" + announced_args="${announced_args} ${arg}" fi done - echo $announced_args + echo ${announced_args} } function 
run_test_stdout { - file=$1; shift + file=${1}; shift announced_args=$(get_announced_args $@) - announce_test "$announced_args >$file" - file="$REPO_ROOT/tmp/$file" - if [ "$SONAR_HOST_URL" == "$SONAR_HOST_URL_SONARCLOUD" ]; then - "$@" -o okorach >"$file" 2>>$IT_LOG_FILE + announce_test "${announced_args} >${file}" + file="${REPO_ROOT}/tmp/${file}" + if [[ "${SONAR_HOST_URL}" == "${SONAR_HOST_URL}_SONARCLOUD" ]]; then + "$@" -o okorach >"${file}" 2>>$IT_LOG_FILE else - "$@" >"$file" 2>>$IT_LOG_FILE + "$@" >"${file}" 2>>$IT_LOG_FILE fi - test_passed_if_file_not_empty "$file" + test_passed_if_file_not_empty "${file}" } check_file_not_empty() { - if [ -s "$1" ]; then - logmsg "Output file $1 is OK" + if [[ -s "${1}" ]]; then + logmsg "Output file ${1} is OK" else - logmsg "Output file $1 is missing or empty" + logmsg "Output file ${1} is missing or empty" # exit 1 fi } @@ -96,14 +96,14 @@ test_passed_if_identical() { } test_passed_if_file_not_empty() { - [ -s "$1" ] + [[ -s "${1}" ] code=$? 
test_result $code return $code } test_result() { - if [ $1 -eq 0 ]; then + if [[ ${1} -eq 0 ]]; then echo -e "--> ${GREEN}PASSED${RESET}" else echo -e "==> ${RED}*** FAILED ***${RESET}" diff --git a/test/integration/it.sh b/test/integration/it.sh index 78db32d9b..aa11e451e 100755 --- a/test/integration/it.sh +++ b/test/integration/it.sh @@ -22,12 +22,12 @@ # set -euo pipefail DIR="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" -source "$DIR/it-tools.sh" +source "${DIR}/it-tools.sh" DB_BACKUPS_DIR=~/backup IT_TEST_PORT=9888 function backup_for { - case $1 in + case ${1} in lts|lta|lta-ce|lts-de|lta-de) db="$DB_BACKUPS_DIR/db.lts.backup" ;; @@ -44,14 +44,14 @@ function backup_for { db="$DB_BACKUPS_DIR/db.latest.backup" ;; *) - logmsg "ERROR: Instance $1 has no corresponding DB backup" + logmsg "ERROR: Instance ${1} has no corresponding DB backup" db="NO_DB_BACKUP" esac echo $db } function tag_for { - case $1 in + case ${1} in lts|lta|lts-audit|lta-audit) tag="2025-lta-enterprise" ;; @@ -71,17 +71,17 @@ function tag_for { tag="9-enterprise" ;; *) - logmsg "ERROR: Instance $1 has no corresponding tag" + logmsg "ERROR: Instance ${1} has no corresponding tag" tag="NO_TAG" esac echo $tag } -[ $# -eq 0 ] && echo "Usage: $0 [... ]" && exit 1 -rm -f "$TMP"/*.log "$TMP"/*.csv "$TMP"/*.json +[ $# -eq 0 ]] && echo "Usage: $0 [... 
]" && exit 1 +rm -f "${TMP}"/*.log "${TMP}"/*.csv "${TMP}"/*.json noExport=0 -if [ "$1" == "--noExport" ]; then +if [[ "${1}" == "--noExport" ]]; then noExport=1 shift fi @@ -91,107 +91,107 @@ logmsg "$(date)" for env in "$@" do - logmsg "Install sonar-tools current local version: root = $TMP" - cd "$REPO_ROOT"; ./deploy.sh nodoc nodocker 1>$IT_LOG_FILE 2>&1; cd - + logmsg "Install sonar-tools current local version: root = ${TMP}" + cd "${REPO_ROOT}"; ./deploy.sh nodoc nodocker 1>$IT_LOG_FILE 2>&1; cd - - if [ "$env" = "sonarcloud" ]; then - logmsg "Running with environment $env" - export SONAR_TOKEN=$SONAR_TOKEN_SONARCLOUD - export SONAR_HOST_URL=$SONAR_HOST_URL_SONARCLOUD + if [[ "${env}" = "sonarcloud" ]]; then + logmsg "Running with environment ${env}" + export SONAR_TOKEN=${SONAR_TOKEN_SONARCLOUD} + export SONAR_HOST_URL=${SONAR_HOST_URL_SONARCLOUD} else id="it$$" - logmsg "Creating IT test environment $env - sonarId $id" - sqport=$IT_TEST_PORT - pgport=$(expr $sqport - 4000) - echo sonar create -i $id -t "$(tag_for "$env")" -s $sqport -p $pgport -f "$(backup_for "$env")" - sonar create -i $id -t "$(tag_for "$env")" -s $sqport -p $pgport -f "$(backup_for "$env")" 1>$IT_LOG_FILE 2>&1 + logmsg "Creating IT test environment ${env} - sonarId ${id}" + sqport=${IT_TEST_PORT} + pgport=$(expr ${sqport} - 4000) + echo sonar create -i ${id} -t "$(tag_for "${env}")" -s ${sqport} -p ${pgport} -f "$(backup_for "${env}")" + sonar create -i ${id} -t "$(tag_for "${env}")" -s ${sqport} -p ${pgport} -f "$(backup_for "${env}")" 1>$IT_LOG_FILE 2>&1 export SONAR_TOKEN=$SONAR_TOKEN_ADMIN_USER - if [[ "$env" =~ ^9.*$ ]]; then + if [[ "${env}" =~ ^9.*$ ]]; then logmsg "Using 9 token" - export SONAR_TOKEN=$SONAR_TOKEN_9_ADMIN_USER + export SONAR_TOKEN=${SONAR_TOKEN_9_ADMIN_USER} fi - export SONAR_HOST_URL="http://localhost:$sqport" + export SONAR_HOST_URL="http://localhost:${sqport}" fi - logmsg "=====> IT sonar-measures-export $env" + logmsg "=====> IT sonar-measures-export ${env}" - 
f="measures-$env-unrel.csv"; run_test "$f" sonar-measures-export -b '.+' -m _main --withURL - f="measures-$env-2.csv"; run_test_stdout "$f" sonar-measures-export -b '.+' -m _main --withURL - f="measures-$env-3.csv"; run_test_stdout "$f" sonar-measures-export -b '.+' -p -r -d -m _all + f="measures-${env}-unrel.csv"; run_test "${f}" sonar-measures-export -b '.+' -m _main --withURL + f="measures-${env}-2.csv"; run_test_stdout "${f}" sonar-measures-export -b '.+' -m _main --withURL + f="measures-${env}-3.csv"; run_test_stdout "${f}" sonar-measures-export -b '.+' -p -r -d -m _all - f="measures-$env-1.json"; run_test "$f" sonar-measures-export -b '.+' -m _all - f="measures-$env-2.json"; run_test_stdout "$f" sonar-measures-export -b '.+' -p -r -d -m _all --format json - f="measures-$env-3.csv"; run_test "$f" sonar-measures-export -b '.+' --csvSeparator '+' -m _main + f="measures-${env}-1.json"; run_test "${f}" sonar-measures-export -b '.+' -m _all + f="measures-${env}-2.json"; run_test_stdout "${f}" sonar-measures-export -b '.+' -p -r -d -m _all --format json + f="measures-${env}-3.csv"; run_test "${f}" sonar-measures-export -b '.+' --csvSeparator '+' -m _main - f="measures-history-$env-1.csv"; run_test "$f" sonar-measures-export -b '.+' --history - f="measures-history-$env-2.csv"; run_test "$f" sonar-measures-export -b '.+' -k okorach_sonar-tools --history --asTable - f="measures-history-$env-3.json"; run_test "$f" sonar-measures-export -b '.+' --history + f="measures-history-${env}-1.csv"; run_test "${f}" sonar-measures-export -b '.+' --history + f="measures-history-${env}-2.csv"; run_test "${f}" sonar-measures-export -b '.+' -k okorach_sonar-tools --history --asTable + f="measures-history-${env}-3.json"; run_test "${f}" sonar-measures-export -b '.+' --history - logmsg "=====> IT sonar-findings-export $env" + logmsg "=====> IT sonar-findings-export ${env}" - f="findings-$env-unrel.csv"; run_test "$f" sonar-findings-export -v DEBUG - f="findings-$env-1.json"; run_test 
"$f" sonar-findings-export - f="findings-$env-2.json"; run_test_stdout "$f" sonar-findings-export -v DEBUG --format json -k '(okorach_audio-video-tools|okorach_sonar-tools)' - f="findings-$env-3.json"; run_test_stdout "$f" sonar-findings-export -v DEBUG --format json -k '(okorach_audio-video-tools|okorach_sonar-tools)' --useFindings - f="findings-$env-4.csv"; run_test_stdout "$f" sonar-findings-export --format csv -k '(okorach_audio-video-tools|okorach_sonar-tools)' --csvSeparator '+' + f="findings-${env}-unrel.csv"; run_test "${f}" sonar-findings-export -v DEBUG + f="findings-${env}-1.json"; run_test "${f}" sonar-findings-export + f="findings-${env}-2.json"; run_test_stdout "${f}" sonar-findings-export -v DEBUG --format json -k '(okorach_audio-video-tools|okorach_sonar-tools)' + f="findings-${env}-3.json"; run_test_stdout "${f}" sonar-findings-export -v DEBUG --format json -k '(okorach_audio-video-tools|okorach_sonar-tools)' --useFindings + f="findings-${env}-4.csv"; run_test_stdout "${f}" sonar-findings-export --format csv -k '(okorach_audio-video-tools|okorach_sonar-tools)' --csvSeparator '+' - if [ "$env" = "sonarcloud" ]; then - logmsg "IT $env sonar-audit SKIPPED" - logmsg "IT $env sonar-housekeeper SKIPPED" + if [[ "${env}" = "sonarcloud" ]]; then + logmsg "IT ${env} sonar-audit SKIPPED" + logmsg "IT ${env} sonar-housekeeper SKIPPED" else - logmsg "=====> IT sonar-audit $env" - f="audit-$env-unrel.csv"; run_test_stdout "$f" sonar-audit - f="audit-$env-1.json"; run_test "$f" sonar-audit - f="audit-$env-2.json"; run_test_stdout "$f" sonar-audit --format json --what qualitygates,qualityprofiles,settings - f="audit-$env-3.csv"; run_test_stdout "$f" sonar-audit --csvSeparator '+' --format csv - - logmsg "=====> IT sonar-housekeeper $env" - f="housekeeper-$env-1.csv"; run_test_stdout "$f" sonar-housekeeper -P 365 -B 90 -T 180 -R 30 + logmsg "=====> IT sonar-audit ${env}" + f="audit-${env}-unrel.csv"; run_test_stdout "${f}" sonar-audit + f="audit-${env}-1.json"; 
run_test "${f}" sonar-audit + f="audit-${env}-2.json"; run_test_stdout "${f}" sonar-audit --format json --what qualitygates,qualityprofiles,settings + f="audit-${env}-3.csv"; run_test_stdout "${f}" sonar-audit --csvSeparator '+' --format csv + + logmsg "=====> IT sonar-housekeeper ${env}" + f="housekeeper-${env}-1.csv"; run_test_stdout "${f}" sonar-housekeeper -P 365 -B 90 -T 180 -R 30 fi - logmsg "=====> IT sonar-loc $env" - f="loc-$env-1.csv"; run_test_stdout "$f" sonar-loc - f="loc-$env-unrel.csv"; run_test_stdout "$f" sonar-loc -n -a - f="loc-$env-2.csv"; run_test "$f" sonar-loc -n -a --csvSeparator ';' - - logmsg "=====> IT sonar-rules $env" - f="rules-$env-1.csv"; run_test_stdout "$f" sonar-rules -e - f="rules-$env-2.csv"; run_test "$f" sonar-rules -e - f="rules-$env-3.json"; run_test_stdout "$f" sonar-rules -e --format json - f="rules-$env-4.json"; run_test "$f" sonar-rules -e - - logmsg "=====> IT sonar-config $env" - f="config-$env-1.json"; run_test_stdout "$f" sonar-config -e -w "qualitygates, qualityprofiles, projects" -k '(okorach_audio-video-tools|okorach_sonar-tools)' - f="config-$env-2.json"; run_test_stdout "$f" sonar-config --export - f="config-$env-unrel.json"; run_test "$f" sonar-config --export - - logmsg "=====> IT sonar-projects-export $env" - if [ $noExport -eq 1 ]; then - logmsg "sonar-projects-export $env test skipped" - elif [ "$env" = "sonarcloud" ]; then - logmsg "sonar-projects-export $env SKIPPED" + logmsg "=====> IT sonar-loc ${env}" + f="loc-${env}-1.csv"; run_test_stdout "${f}" sonar-loc + f="loc-${env}-unrel.csv"; run_test_stdout "${f}" sonar-loc -n -a + f="loc-${env}-2.csv"; run_test "${f}" sonar-loc -n -a --csvSeparator ';' + + logmsg "=====> IT sonar-rules ${env}" + f="rules-${env}-1.csv"; run_test_stdout "${f}" sonar-rules -e + f="rules-${env}-2.csv"; run_test "${f}" sonar-rules -e + f="rules-${env}-3.json"; run_test_stdout "${f}" sonar-rules -e --format json + f="rules-${env}-4.json"; run_test "${f}" sonar-rules -e + + logmsg 
"=====> IT sonar-config ${env}" + f="config-${env}-1.json"; run_test_stdout "${f}" sonar-config -e -w "qualitygates, qualityprofiles, projects" -k '(okorach_audio-video-tools|okorach_sonar-tools)' + f="config-${env}-2.json"; run_test_stdout "${f}" sonar-config --export + f="config-${env}-unrel.json"; run_test "${f}" sonar-config --export + + logmsg "=====> IT sonar-projects-export ${env}" + if [[ ${noExport} -eq 1 ]]; then + logmsg "sonar-projects-export ${env} test skipped" + elif [[ "${env}" = "sonarcloud" ]]; then + logmsg "sonar-projects-export ${env} SKIPPED" else - logmsg "sonar-projects-export $env" - f="proj-export-$env-2.json"; run_test "$f" sonar-projects-export + logmsg "sonar-projects-export ${env}" + f="proj-export-${env}-2.json"; run_test "${f}" sonar-projects-export fi - logmsg "=====> IT sonar-findings-export $env ADMIN export" - f1="findings-$env-admin.csv"; run_test "$f1" sonar-findings-export -v DEBUG -k '(okorach_audio-video-tools|okorach_sonar-tools)' + logmsg "=====> IT sonar-findings-export ${env} ADMIN export" + f1="findings-${env}-admin.csv"; run_test "${f1}" sonar-findings-export -v DEBUG -k '(okorach_audio-video-tools|okorach_sonar-tools)' #-------------------------------------------------------------------------- - source "$DIR"/it-docker.sh "$env" + source "${DIR}"/it-docker.sh "${env}" - if [ "$env" = "sonarcloud" ]; then - logmsg "sonar-projects-export $env SKIPPED" + if [[ "${env}" = "sonarcloud" ]]; then + logmsg "sonar-projects-export ${env} SKIPPED" else - logmsg "=====> IT sonar-findings-export $env USER export" + logmsg "=====> IT sonar-findings-export ${env} USER export" export SONAR_TOKEN=$SONAR_TOKEN_USER_USER - if [[ "$env" =~ ^lts.*$ ]]; then + if [[ "${env}" =~ ^lts.*$ ]]; then logmsg "Using LTS token" export SONAR_TOKEN=$SONAR_TOKEN_LTS_USER_USER fi - f2="findings-$env-user.csv"; run_test "$f2" sonar-findings-export -v DEBUG -k '(okorach_audio-video-tools|okorach_sonar-tools)' + f2="findings-${env}-user.csv"; run_test 
"${f}2" sonar-findings-export -v DEBUG -k '(okorach_audio-video-tools|okorach_sonar-tools)' fi # Restore admin token as long as previous version is 2.9 or less @@ -200,39 +200,39 @@ do export SONAR_TOKEN=$SONAR_TOKEN_ADMIN_USER - if [[ "$env" =~ ^lts.*$ ]]; then + if [[ "${env}" =~ ^lts.*$ ]]; then logmsg "Using LTS token" export SONAR_TOKEN=$SONAR_TOKEN_LTS_ADMIN_USER fi - logmsg "=====> IT released tools $env" - f="measures-$env-rel.csv"; run_test "$f" sonar-measures-export -b '.+' -m _main --withURL - f="findings-$env-rel.csv"; run_test "$f" sonar-findings-export + logmsg "=====> IT released tools ${env}" + f="measures-${env}-rel.csv"; run_test "${f}" sonar-measures-export -b '.+' -m _main --withURL + f="findings-${env}-rel.csv"; run_test "${f}" sonar-findings-export # Breaks in version 3.3 - # f="audit-$env-rel.csv"; run_test "$f" sonar-audit -f "audit-$env-rel.csv" - f="loc-$env-rel.csv"; run_test "$f" sonar-loc -n -a - f="config-$env-rel.json"; run_test "$f" sonar-config -e + # f="audit-${env}-rel.csv"; run_test "${f}" sonar-audit -f "audit-${env}-rel.csv" + f="loc-${env}-rel.csv"; run_test "${f}" sonar-loc -n -a + f="config-${env}-rel.json"; run_test "${f}" sonar-config -e - logmsg "IT compare released and unreleased $env" + logmsg "IT compare released and unreleased ${env}" for f in measures loc do - root="$TMP/$f-$env" - announce_test "$f-$env diff" - sort -o "$root-rel.csv" "$root-rel.csv" - sort -o "$root-unrel.csv" "$root-unrel.csv" - test_passed_if_identical "$root-rel.csv" "$root-unrel.csv" + root="${TMP}/${f}-${env}" + announce_test "${f}-${env} diff" + sort -o "${root}-rel.csv" "${root}-rel.csv" + sort -o "${root}-unrel.csv" "${root}-unrel.csv" + test_passed_if_identical "${root}-rel.csv" "${root}-unrel.csv" done for f in config do - root="$TMP/$f-$env" - announce_test "$f-$env diff" - test_passed_if_identical "$root-rel.json" "$root-unrel.json" + root="${TMP}/${f}-${env}" + announce_test "${f}-${env} diff" + test_passed_if_identical 
"${root}-rel.json" "${root}-unrel.json" done - announce_test "=====> findings-$env admin vs user diff" - test_passed_if_identical "$TMP/findings-$env-admin.csv" "$TMP/findings-$env-user.csv" + announce_test "=====> findings-${env} admin vs user diff" + test_passed_if_identical "${TMP}/findings-${env}-admin.csv" "${TMP}/findings-${env}-user.csv" - if [ "$env" != "sonarcloud" ]; then - logmsg "Deleting environment sonarId $id" - sonar delete -i "$id" 1>$IT_LOG_FILE 2>&1 + if [[ "${env}" != "sonarcloud" ]]; then + logmsg "Deleting environment sonarId ${id}" + sonar delete -i "${id}" 1>$IT_LOG_FILE 2>&1 fi done diff --git a/test/test-sync.sh b/test/test-sync.sh index ef2b221f4..520310608 100755 --- a/test/test-sync.sh +++ b/test/test-sync.sh @@ -4,9 +4,9 @@ for proj in source target do curl -X POST -u "$SONAR_TOKEN:" "$SONAR_HOST_URL/api/projects/delete?project=$proj" opts=("-Dsonar.projectKey=$proj" "-Dsonar.projectName=$proj") - scan.sh "${opts[@]}" "$@" + conf/run_all.sh "${opts[@]}" "$@" for branch in release-1.x release-2.x do - scan.sh "${opts[@]}" "$@" "-Dsonar.branch.name=$branch" + conf/run_all.sh "${opts[@]}" "$@" "-Dsonar.branch.name=$branch" done done diff --git a/test/unit/conftest.py b/test/unit/conftest.py index aa2fa8f60..07bf2d435 100644 --- a/test/unit/conftest.py +++ b/test/unit/conftest.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Test fixtures """ +"""Test fixtures""" import os from collections.abc import Generator diff --git a/test/unit/test_apps.py b/test/unit/test_apps.py index 13d27d0d1..a481dd0e4 100644 --- a/test/unit/test_apps.py +++ b/test/unit/test_apps.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" applications tests """ +"""applications tests""" import datetime from collections.abc import Generator @@ -76,7 +76,7 @@ def test_get_object_non_existing() -> None: return with pytest.raises(exceptions.ObjectNotFound) as e: _ = App.get_object(endpoint=tutil.SQ, key=NON_EXISTING_KEY) - assert str(e.value).endswith(f"Application key '{NON_EXISTING_KEY}' not found") + assert str(e.value).endswith(f"Application '{NON_EXISTING_KEY}' not found") def test_exists(get_test_app: Generator[App]) -> None: diff --git a/test/unit/test_audit.py b/test/unit/test_audit.py index fed6c2d5e..2ff2449d4 100644 --- a/test/unit/test_audit.py +++ b/test/unit/test_audit.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" sonar-audit tests """ +"""sonar-audit tests""" import os from collections.abc import Generator diff --git a/test/unit/test_branches.py b/test/unit/test_branches.py new file mode 100644 index 000000000..e89c80145 --- /dev/null +++ b/test/unit/test_branches.py @@ -0,0 +1,193 @@ +# +# sonar-tools tests +# Copyright (C) 2025 Olivier Korach +# mailto:olivier.korach AT gmail DOT com +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# + +"""applications tests""" + +import pytest + +import utilities as tutil +from sonar import projects, branches, exceptions +import sonar.util.constants as c + +SUPPORTED_EDITIONS = (c.DE, c.EE, c.DCE) + + +def verify_branch_support(func: callable, **kwargs) -> bool: + if kwargs["concerned_object"].endpoint.edition() not in SUPPORTED_EDITIONS: + with pytest.raises(exceptions.UnsupportedOperation): + _ = func(**kwargs) + return False + return True + + +def test_get_object() -> None: + """Test get_object and verify that if requested twice the same object is returned""" + + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + obj = branches.Branch.get_object(concerned_object=project, branch_name="develop") + assert str(obj) == f"branch 'develop' of project '{project.key}'" + obj.refresh() + + +def test_not_found() -> None: + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + with pytest.raises(exceptions.ObjectNotFound): + obj = branches.Branch.get_object(concerned_object=project, branch_name="non-existing") + + obj = branches.Branch.get_object(concerned_object=project, branch_name="develop") + obj.name = "non-existing2" + with pytest.raises(exceptions.ObjectNotFound): + obj.refresh() + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + obj.concerned_object.key = "non-existing2" + with pytest.raises(exceptions.ObjectNotFound): + obj.new_code() + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + +def test_is_main_is_kept(): + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + obj = 
branches.Branch.get_object(concerned_object=project, branch_name="develop") + obj._keep_when_inactive = None + obj.refresh() + assert obj.is_kept_when_inactive() in (True, False) + obj._is_main = None + assert obj.is_main() in (True, False) + + +def test_set_as_main(): + """test_set_as_main""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + main_br_name = project.main_branch_name() + main_br = branches.Branch.get_object(concerned_object=project, branch_name=main_br_name) + assert main_br.is_main() + assert not dev_br.is_main() + + if tutil.SQ.version() < (10, 0, 0): + with pytest.raises(exceptions.UnsupportedOperation): + dev_br.set_as_main() + return + + assert dev_br.set_as_main() + assert not main_br.is_main() + assert dev_br.is_main() + + assert main_br.set_as_main() + + main_br.name = "non-existing-main" + with pytest.raises(exceptions.ObjectNotFound): + main_br.set_as_main() + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + +def test_set_keep_as_inactive(): + """test_set_keep_as_inactive""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + master_br = branches.Branch.get_object(concerned_object=project, branch_name="master") + assert dev_br.is_kept_when_inactive() + assert master_br.is_kept_when_inactive() + + assert dev_br.set_keep_when_inactive(False) + assert not dev_br.is_kept_when_inactive() + assert master_br.is_kept_when_inactive() + + assert dev_br.set_keep_when_inactive(True) + + dev_br.name = "non-existing-develop" + with pytest.raises(exceptions.ObjectNotFound): + 
dev_br.set_keep_when_inactive(True) + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + +def test_rename(): + """test_rename""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + main_br_name = project.main_branch_name() + main_br = branches.Branch.get_object(concerned_object=project, branch_name=main_br_name) + with pytest.raises(exceptions.UnsupportedOperation): + dev_br.rename("release") + + new_name = "gold" + assert main_br.rename(new_name) + assert not main_br.rename(new_name) + + new_br = branches.Branch.get_object(concerned_object=project, branch_name=new_name) + assert new_br is main_br + assert main_br.rename(main_br_name) + assert new_br.name == main_br_name + + +def test_get_findings(): + """test_get_findings""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + assert len(dev_br.get_findings()) > 0 + + dev_br.name = "non-existing-dev2" + with pytest.raises(exceptions.ObjectNotFound): + dev_br.get_findings() + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + +def test_audit(): + """test_audit_off""" + project = projects.Project.get_object(tutil.SQ, tutil.LIVE_PROJECT) + if not verify_branch_support(branches.Branch.get_object, concerned_object=project, branch_name="develop"): + return + dev_br = branches.Branch.get_object(concerned_object=project, branch_name="develop") + assert len(dev_br.audit({"audit.project.branches": False})) == 0 + + dev_br.name = "non-existing-dev3" + assert len(dev_br.audit({})) == 0 + branches.Branch.CACHE.clear() + projects.Project.CACHE.clear() + + 
+def test_exists(): + """test_exists""" + if tutil.SQ.edition() == c.CE: + with pytest.raises(exceptions.UnsupportedOperation): + branches.exists(tutil.SQ, branch_name="develop", project_key=tutil.LIVE_PROJECT) + else: + assert branches.exists(tutil.SQ, branch_name="develop", project_key=tutil.LIVE_PROJECT) + assert not branches.exists(tutil.SQ, branch_name="foobar", project_key=tutil.LIVE_PROJECT) diff --git a/test/unit/test_cli.py b/test/unit/test_cli.py index b2ad2d1f7..c073316a8 100644 --- a/test/unit/test_cli.py +++ b/test/unit/test_cli.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" projects_cli tests """ +"""projects_cli tests""" from collections.abc import Generator import json diff --git a/test/unit/test_common_audit.py b/test/unit/test_common_audit.py index 1e5e23a4e..9bd4193b3 100644 --- a/test/unit/test_common_audit.py +++ b/test/unit/test_common_audit.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Common tests, independent of SonarQube version """ +"""Common tests, independent of SonarQube version""" import os import csv diff --git a/test/unit/test_common_misc.py b/test/unit/test_common_misc.py index 1bda49574..17e6fd5b5 100644 --- a/test/unit/test_common_misc.py +++ b/test/unit/test_common_misc.py @@ -19,9 +19,10 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" Common tests, independent of SonarQube version """ +"""Common tests, independent of SonarQube version""" -from sonar import utilities +from cli import sonar_tools +from sonar import utilities, errcodes from sonar.util import sonar_cache import utilities as tutil @@ -43,4 +44,4 @@ def test_clear_cache() -> None: def test_sonar_tools_help() -> None: """test_sonar_tools_help""" - assert tutil.run_cmd("sonar-tools-help") == 0 + assert tutil.run_cmd(sonar_tools.main, "sonar-tools-help") == errcodes.OK diff --git a/test/unit/test_common_sif.py b/test/unit/test_common_sif.py index b169a8e3c..eafc11265 100644 --- a/test/unit/test_common_sif.py +++ b/test/unit/test_common_sif.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Common tests, independent of SonarQube version """ +"""Common tests, independent of SonarQube version""" import os, stat from collections.abc import Generator diff --git a/test/unit/test_common_sonarcloud.py b/test/unit/test_common_sonarcloud.py index 995742f64..b4652f834 100644 --- a/test/unit/test_common_sonarcloud.py +++ b/test/unit/test_common_sonarcloud.py @@ -18,7 +18,7 @@ # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" sonarcloud tests """ +"""sonarcloud tests""" import os import pytest diff --git a/test/unit/test_config.py b/test/unit/test_config.py index 1689fbf75..9ca7bcac8 100644 --- a/test/unit/test_config.py +++ b/test/unit/test_config.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" sonar-config tests """ +"""sonar-config tests""" from collections.abc import Generator diff --git a/test/unit/test_devops.py b/test/unit/test_devops.py index c8615dbff..f08ffe0a3 100644 --- a/test/unit/test_devops.py +++ b/test/unit/test_devops.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" DevOps platforms tests """ +"""DevOps platforms tests""" import pytest import utilities as tutil diff --git a/test/unit/test_findings.py b/test/unit/test_findings.py index 7a33735ff..52efb5c49 100644 --- a/test/unit/test_findings.py +++ b/test/unit/test_findings.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" sonar-findings-export tests """ +"""sonar-findings-export tests""" import os import sys @@ -56,7 +56,7 @@ f"--{opt.KEY_REGEXP} training:security -{opt.BRANCH_REGEXP_SHORT} main", f"--{opt.USE_FINDINGS} -{opt.KEY_REGEXP_SHORT} ({tutil.PROJECT_0}|{tutil.PROJECT_1})", f"--{opt.APPS} -{opt.KEY_REGEXP_SHORT} APP_TEST --{opt.BRANCH_REGEXP} .+", - f"--{opt.PORTFOLIOS} -{opt.KEY_REGEXP_SHORT} Banking -{opt.REPORT_FILE_SHORT} {tutil.CSV_FILE}", + # See issue #2011 f"--{opt.PORTFOLIOS} -{opt.KEY_REGEXP_SHORT} Banking -{opt.REPORT_FILE_SHORT} {tutil.CSV_FILE}", f"-{opt.KEY_REGEXP_SHORT} {tutil.PROJECT_0} -{opt.BRANCH_REGEXP_SHORT} .+", f"--{opt.STATUSES} OPEN,CLOSED --{opt.SEVERITIES} {idefs.STD_SEVERITY_BLOCKER},{idefs.STD_SEVERITY_CRITICAL}", ] @@ -271,6 +271,7 @@ def test_findings_export(csv_file: Generator[str]) -> None: def test_findings_export_long(csv_file: Generator[str]) -> None: """test_findings_export_long""" + pytest.skip("Test too long") cmd_csv = f"{CMD} --{opt.REPORT_FILE} {csv_file}" for opts in __GOOD_OPTS_LONG: assert tutil.run_cmd(findings_export.main, f"{cmd_csv} {opts}") == e.OK @@ -301,11 +302,11 @@ def test_issues_count_3() -> None: def test_search_issues_by_project() -> None: """test_search_issues_by_project""" nb_issues = len(issues.search_by_project(endpoint=tutil.SQ, project_key=tutil.LIVE_PROJECT, search_findings=True)) - assert 100 <= nb_issues <= 500 + assert 100 <= nb_issues <= 3700 nb_issues = len(issues.search_by_project(endpoint=tutil.SQ, project_key=tutil.LIVE_PROJECT, params={"resolved": "false"})) - assert nb_issues < 500 + assert nb_issues < 1800 nb_issues = 
len(issues.search_by_project(endpoint=tutil.SQ, project_key=None)) - assert nb_issues > 1000 + assert nb_issues > 3500 def test_search_too_many_issues() -> None: diff --git a/test/unit/test_findings_sync.py b/test/unit/test_findings_sync.py index 1378c3273..44159a813 100644 --- a/test/unit/test_findings_sync.py +++ b/test/unit/test_findings_sync.py @@ -20,40 +20,60 @@ # """ - sonar-findings-sync tests +sonar-findings-sync tests """ import os from collections.abc import Generator +import pytest import utilities as tutil from sonar import errcodes as e +from sonar.util import constants as c from cli import findings_sync import cli.options as opt CMD = "sonar-findings-sync.py" -PLAT_OPTS = f"{tutil.SQS_OPTS} -U {os.getenv('SONAR_HOST_URL_TEST')} -T {os.getenv('SONAR_TOKEN_SYNC_USER')}" +TEST_URL = os.getenv("SONAR_HOST_URL_TEST") +TEST_TOKEN = os.getenv("SONAR_TOKEN_SYNC_USER") +PLAT_OPTS = f"{tutil.SQS_OPTS} --{opt.URL_TARGET} {TEST_URL} --{opt.TOKEN_TARGET} {TEST_TOKEN}" +TEST_OPTS = f"--{opt.URL} {TEST_URL} --{opt.TOKEN} {TEST_TOKEN} --{opt.KEY_REGEXP} TESTSYNC" SC_PLAT_OPTS = f"{tutil.SQS_OPTS} -U https://sonarcloud.io -T {os.getenv('SONAR_TOKEN_SONARCLOUD')} -O okorach" SYNC_OPTS = f"-{opt.KEY_REGEXP_SHORT} {tutil.LIVE_PROJECT} -K TESTSYNC" def test_sync_help() -> None: - """test_sync""" + """test_sync_help""" assert tutil.run_cmd(findings_sync.main, f"{CMD} -h") == e.ARGS_ERROR -def test_sync_proj(json_file: Generator[str]) -> None: - """test_sync""" +def test_sync_2_proj_all_branches(json_file: Generator[str]) -> None: + """test_sync_2_proj_all_branches""" assert tutil.run_cmd(findings_sync.main, f"{CMD} {PLAT_OPTS} {SYNC_OPTS} -{opt.REPORT_FILE_SHORT} {json_file}") == e.OK -def test_sync_branch(json_file: Generator[str]) -> None: - """test_sync""" - assert tutil.run_cmd(findings_sync.main, f"{CMD} {PLAT_OPTS} {SYNC_OPTS} -b master -B main -{opt.REPORT_FILE_SHORT} {json_file}") == e.OK +def test_sync_same_proj_all_branches(json_file: Generator[str]) -> None: + 
"""test_sync_same_proj_all_branches""" + # Project sync across all branches of a given project + pytest.skip("No yet supported") + assert tutil.run_cmd(findings_sync.main, f"{CMD} {TEST_OPTS} --{opt.REPORT_FILE} {json_file}") == e.OK + + +def test_sync_same_proj_2_branches(json_file: Generator[str]) -> None: + """test_sync_same_proj_2_branches""" + assert tutil.run_cmd(findings_sync.main, f"{CMD} {TEST_OPTS} -b main -B develop -K TESTSYNC --{opt.REPORT_FILE} {json_file}") == e.OK + + +def test_sync_2_proj_branches(json_file: Generator[str]) -> None: + """test_sync_2_proj_branches""" + code = e.UNSUPPORTED_OPERATION if tutil.SQ.edition() == c.CE else e.OK + assert tutil.run_cmd(findings_sync.main, f"{CMD} {PLAT_OPTS} {SYNC_OPTS} -b master -B main -{opt.REPORT_FILE_SHORT} {json_file}") == code + if tutil.SQ.edition() == c.CE: + assert tutil.run_cmd(findings_sync.main, f"{CMD} {PLAT_OPTS} {SYNC_OPTS} -B main -{opt.REPORT_FILE_SHORT} {json_file}") == e.OK def test_sync_scloud(json_file: Generator[str]) -> None: - """test_sync""" - assert tutil.run_cmd(findings_sync.main, f"{CMD} {SC_PLAT_OPTS} {SYNC_OPTS} --threads 16 -{opt.REPORT_FILE_SHORT} {json_file}") == e.OK + """test_sync_scloud""" + assert tutil.run_cmd(findings_sync.main, f"{CMD} {SC_PLAT_OPTS} {SYNC_OPTS} --{opt.NBR_THREADS} 16 -{opt.REPORT_FILE_SHORT} {json_file}") == e.OK diff --git a/test/unit/test_groups.py b/test/unit/test_groups.py index b7a741b9c..5504b85b9 100644 --- a/test/unit/test_groups.py +++ b/test/unit/test_groups.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Groups tests """ +"""Groups tests""" from collections.abc import Generator diff --git a/test/unit/test_hotspots.py b/test/unit/test_hotspots.py index 98296053e..dee67b27d 100644 --- a/test/unit/test_hotspots.py +++ b/test/unit/test_hotspots.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" Test of the hotspots module and class, as well as changelog """ +"""Test of the hotspots module and class, as well as changelog""" import utilities as tutil from sonar import hotspots diff --git a/test/unit/test_housekeeper.py b/test/unit/test_housekeeper.py index 8cb4a1120..cbebd2452 100644 --- a/test/unit/test_housekeeper.py +++ b/test/unit/test_housekeeper.py @@ -21,7 +21,7 @@ """ - sonar-housekeeper tests +sonar-housekeeper tests """ import utilities as tutil diff --git a/test/unit/test_issues.py b/test/unit/test_issues.py index 595e1e9b6..91f232bae 100644 --- a/test/unit/test_issues.py +++ b/test/unit/test_issues.py @@ -19,11 +19,13 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Test of the issues module and class, as well as changelog """ +"""Test of the issues module and class, as well as changelog""" -from datetime import datetime +from datetime import datetime, timedelta import pytest +from requests.exceptions import ConnectionError + import utilities as tutil from sonar import issues, exceptions, logging from sonar import utilities as util @@ -31,7 +33,7 @@ ISSUE_FP = "ffbe8a34-cef6-4d5b-849d-bb2c25951c51" -ISSUE_FP_V9_9 = "AZi22OzWCMRVk7bHctjy" +ISSUE_FP_V9_9 = "AZi22OzbCMRVk7bHctjz" ISSUE_ACCEPTED = "c99ac40e-c2c5-43ef-bcc5-4cd077d1052f" ISSUE_ACCEPTED_V9_9 = "AZI6frkTuTfDeRt_hspx" ISSUE_W_MULTIPLE_CHANGELOGS = "6ae41c3b-c3d2-422f-a505-d355e7b0a268" @@ -96,13 +98,17 @@ def test_set_severity() -> None: assert issue.set_severity(new_sev) issue.refresh() assert issue.severity == new_sev - assert not issue.set_severity("NON_EXISTING") + with pytest.raises(exceptions.UnsupportedOperation): + issue.set_severity("NON_EXISTING") issue.set_severity(old_sev) assert not any(issue.set_mqr_severity(k, v) for k, v in new_impacts.items()) issue.refresh() assert issue.impacts == old_impacts + if tutil.SQ.version() < c.MQR_INTRO_VERSION: + return + tutil.SQ.set_mqr_mode(True) assert not issue.set_severity(new_sev) @@ -112,8 +118,10 
@@ def test_set_severity() -> None: assert all(issue.set_mqr_severity(k, v) for k, v in new_impacts.items()) issue.refresh() assert issue.impacts == new_impacts - assert not issue.set_mqr_severity("MAINTAINABILITY", "NON_EXISTING") - assert not issue.set_mqr_severity("NON_EXISTING", "HIGH") + with pytest.raises(exceptions.UnsupportedOperation): + issue.set_mqr_severity("MAINTAINABILITY", "NON_EXISTING") + with pytest.raises(exceptions.SonarException): + issue.set_mqr_severity("NON_EXISTING", "HIGH") [issue.set_mqr_severity(k, v) for k, v in old_impacts.items()] tutil.SQ.set_mqr_mode(is_mqr) @@ -144,7 +152,8 @@ def test_set_type() -> None: assert issue.set_type(new_type) issue.refresh() assert issue.type == new_type - assert not issue.set_type("NON_EXISTING") + with pytest.raises(exceptions.UnsupportedOperation): + issue.set_type("NON_EXISTING") issue.set_type(old_type) @@ -170,7 +179,11 @@ def test_changelog() -> None: assert str(issue) == f"Issue key '{issue_key}'" assert issue.is_false_positive() changelog_l = list(issue.changelog(manual_only=False).values()) - if tutil.SQ.version() >= (25, 1, 0): + if tutil.SQ.version() < (10, 0, 0): + nb_changes = 4 + elif tutil.SQ.version() >= (2025, 4, 2): + nb_changes = 16 + elif tutil.SQ.version() >= (25, 1, 0): nb_changes = 8 else: nb_changes = 1 @@ -194,14 +207,17 @@ def test_changelog() -> None: assert not changelog.is_assignment() assert changelog.assignee() is None assert changelog.assignee(False) is None - if tutil.SQ.version() >= (10, 0, 0): - assert datetime(2025, 2, 12) <= changelog.date_time().replace(tzinfo=None) < datetime(2025, 2, 14) - assert changelog.author() is None + author = None + delta = timedelta(days=1) + if tutil.SQ.version() >= (2025, 5, 0): + date_change = datetime(2025, 10, 12) + elif tutil.SQ.version() >= (10, 0, 0): + date_change = datetime(2025, 2, 13) else: - assert datetime(2025, 8, 16) <= changelog.date_time().replace(tzinfo=None) < datetime(2025, 8, 18) - assert changelog.author() == 
"admin" - assert not changelog.is_tag() - assert changelog.get_tags() == [] + date_change = datetime(2025, 10, 10) + author = "admin" + assert date_change <= changelog.date_time().replace(tzinfo=None) < date_change + delta + assert changelog.author() == author def test_multiple_changelogs(): @@ -240,10 +256,13 @@ def test_request_error() -> None: """test_request_error""" issues_d = issues.search_by_project(endpoint=tutil.TEST_SQ, project_key=tutil.PROJECT_1) issue = list(issues_d.values())[0] + url = tutil.TEST_SQ.local_url tutil.TEST_SQ.local_url = "http://localhost:3337" - assert not issue.add_comment("Won't work") - - assert not issue.assign("admin") + with pytest.raises(ConnectionError): + issue.add_comment("Won't work") + with pytest.raises(ConnectionError): + issue.assign("admin") + tutil.TEST_SQ.local_url = url def test_transitions() -> None: @@ -252,28 +271,37 @@ def test_transitions() -> None: issue = list(issues_d.values())[0] assert issue.confirm() - assert not issue.confirm() + with pytest.raises(exceptions.UnsupportedOperation): + issue.confirm() assert issue.unconfirm() - assert not issue.unconfirm() + with pytest.raises(exceptions.UnsupportedOperation): + issue.unconfirm() assert issue.resolve_as_fixed() - assert not issue.resolve_as_fixed() + with pytest.raises(exceptions.UnsupportedOperation): + issue.resolve_as_fixed() assert issue.reopen() - assert not issue.reopen() + with pytest.raises(exceptions.UnsupportedOperation): + assert not issue.reopen() if tutil.SQ.version() >= c.ACCEPT_INTRO_VERSION: assert issue.accept() - assert not issue.accept() + with pytest.raises(exceptions.UnsupportedOperation): + issue.accept() else: assert issue.mark_as_wont_fix() - assert not issue.mark_as_wont_fix() + with pytest.raises(exceptions.UnsupportedOperation): + issue.mark_as_wont_fix() assert issue.reopen() - assert not issue.reopen() + with pytest.raises(exceptions.UnsupportedOperation): + issue.reopen() assert issue.mark_as_false_positive() - assert not 
issue.mark_as_false_positive() + with pytest.raises(exceptions.UnsupportedOperation): + issue.mark_as_false_positive() assert issue.reopen() - assert not issue.reopen() + with pytest.raises(exceptions.UnsupportedOperation): + issue.reopen() def test_search_first() -> None: diff --git a/test/unit/test_loc.py b/test/unit/test_loc.py index 0e2cd5778..a06fb4da1 100644 --- a/test/unit/test_loc.py +++ b/test/unit/test_loc.py @@ -20,7 +20,7 @@ # """ - sonar-loc tests +sonar-loc tests """ from collections.abc import Generator diff --git a/test/unit/test_logging.py b/test/unit/test_logging.py index 57be4e652..7a51b602e 100644 --- a/test/unit/test_logging.py +++ b/test/unit/test_logging.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Logging tests """ +"""Logging tests""" import os from collections.abc import Generator diff --git a/test/unit/test_measures.py b/test/unit/test_measures.py index 50b26a9bc..de0935503 100644 --- a/test/unit/test_measures.py +++ b/test/unit/test_measures.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" sonar-measures-export tests """ +"""sonar-measures-export tests""" from collections.abc import Generator diff --git a/test/unit/test_migration.py b/test/unit/test_migration.py index 7b2a59635..f61478f0f 100644 --- a/test/unit/test_migration.py +++ b/test/unit/test_migration.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" sonar-migration tests """ +"""sonar-migration tests""" from collections.abc import Generator diff --git a/test/unit/test_orgs.py b/test/unit/test_orgs.py index 9bd147432..60f1c0bc1 100644 --- a/test/unit/test_orgs.py +++ b/test/unit/test_orgs.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" portfolio tests """ +"""portfolio tests""" from collections.abc import Generator import time diff --git a/test/unit/test_platform.py b/test/unit/test_platform.py index 9a39ad27a..2bf79555f 100644 --- a/test/unit/test_platform.py +++ b/test/unit/test_platform.py @@ -19,10 +19,11 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" platform tests """ +"""platform tests""" import json -from requests import RequestException +import requests.exceptions +from datetime import datetime import pytest import utilities as tutil @@ -81,10 +82,11 @@ def test_wrong_url() -> None: tutil.TEST_SQ.local_url = "http://localhost:3337" tutil.TEST_SQ._sys_info = None - with pytest.raises(RequestException): + with pytest.raises(requests.exceptions.ConnectionError): tutil.TEST_SQ.sys_info() - tutil.TEST_SQ.global_permissions() + with pytest.raises(requests.exceptions.ConnectionError): + tutil.TEST_SQ.global_permissions() def test_set_webhooks() -> None: @@ -102,3 +104,8 @@ def test_convert_for_yaml() -> None: json_config = json.load(f)["globalSettings"] yaml_json = platform.convert_for_yaml(json_config.copy()) assert len(yaml_json) == len(json_config) + + +def test_release_date() -> None: + assert datetime(2022, 1, 1).date() < tutil.SQ.release_date() <= datetime.today().date() + assert tutil.SC.release_date() is None diff --git a/test/unit/test_portfolios.py b/test/unit/test_portfolios.py index 4669a9331..2f24a0266 100644 --- a/test/unit/test_portfolios.py +++ b/test/unit/test_portfolios.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" portfolio tests """ +"""portfolio tests""" from collections.abc import Generator import time diff --git a/test/unit/test_project_export.py b/test/unit/test_project_export.py index e6eb48d54..84968c8b0 100644 --- a/test/unit/test_project_export.py +++ b/test/unit/test_project_export.py @@ -20,7 +20,7 @@ # -""" sonar-projects tests """ +"""sonar-projects tests""" from collections.abc import Generator diff --git a/test/unit/test_projects.py b/test/unit/test_projects.py index f030506ac..e61b1b315 100644 --- a/test/unit/test_projects.py +++ b/test/unit/test_projects.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" projects tests """ +"""projects tests""" from collections.abc import Generator import pytest @@ -54,12 +54,15 @@ def test_create_delete() -> None: """test_create_delete""" proj = projects.Project.create(endpoint=tutil.SQ, key=tutil.TEMP_KEY, name="temp") assert proj.key == tutil.TEMP_KEY + assert proj.main_branch_name() == "main" if tutil.SQ.edition() != c.CE: assert proj.main_branch().name == "main" proj.rename_main_branch("foobar") assert proj.main_branch().name == "foobar" else: - assert proj.main_branch_name() == "main" + with pytest.raises(exceptions.UnsupportedOperation): + proj.main_branch() + assert proj.delete() with pytest.raises(exceptions.ObjectNotFound): proj.refresh() @@ -115,13 +118,31 @@ def test_export_sync() -> None: def test_import_async() -> None: """test_import_async""" proj = projects.Project.get_object(endpoint=tutil.SQ, key=tutil.PROJECT_1) - assert proj.import_zip(asynchronous=True) == "ASYNC_SUCCESS" + if tutil.SQ.edition() == c.CE: + with pytest.raises(exceptions.UnsupportedOperation): + proj.import_zip(asynchronous=True) + else: + assert proj.import_zip(asynchronous=True) == "ASYNC_SUCCESS" def test_import_sync() -> None: """test_import_sync""" proj = projects.Project.get_object(endpoint=tutil.SQ, key=tutil.PROJECT_1) - assert 
proj.import_zip(asynchronous=True).startswith("FAILED") + if tutil.SQ.edition() == c.CE: + with pytest.raises(exceptions.UnsupportedOperation): + proj.import_zip(asynchronous=False) + else: + assert proj.import_zip(asynchronous=False).startswith("FAILED") + + +def test_import_no_zip(get_test_project: Generator[projects.Project]) -> None: + """test_import_no_zip""" + if tutil.SQ.edition() == c.CE: + pytest.skip("No zip import in Community Build") + assert get_test_project.import_zip(asynchronous=False) == f"FAILED/ZIP_MISSING" + get_test_project.key = "non-existing" + res = get_test_project.import_zip(asynchronous=False) + assert res.startswith("FAILED/") and "not found" in res def test_monorepo() -> None: @@ -135,10 +156,13 @@ def test_monorepo() -> None: proj = projects.Project.get_object(endpoint=tutil.SQ, key=tutil.LIVE_PROJECT) - assert len(proj.get_findings(branch="non-existing-branch")) == 0 + with pytest.raises(exceptions.ObjectNotFound): + proj.get_findings(branch="non-existing-branch") if tutil.SQ.edition() != c.CE: assert len(proj.get_findings(branch="develop")) > 0 - assert len(proj.get_findings(pr="1")) == 0 + with pytest.raises(exceptions.ObjectNotFound): + proj.get_findings(pr="1") + assert len(proj.get_findings(pr="5")) == 0 def test_count_third_party_issues() -> None: @@ -193,6 +217,11 @@ def test_already_exists() -> None: projects.Project.create(endpoint=tutil.SQ, key=tutil.EXISTING_PROJECT, name="name") +def test_exists() -> None: + assert projects.exists(tutil.SQ, tutil.LIVE_PROJECT) + assert not projects.exists(tutil.SQ, "non-existing") + + def test_binding() -> None: """test_binding""" if tutil.SQ.edition() == c.CE: @@ -220,11 +249,13 @@ def test_import_wrong_key(get_test_project: Generator[projects.Project]) -> None """test_import_wrong_key""" proj = get_test_project proj.key = tutil.NON_EXISTING_KEY - expected_exception = exceptions.ObjectNotFound if tutil.SQ.edition() in (c.EE, c.DCE)
else exceptions.UnsupportedOperation if tutil.SQ.edition() in (c.EE, c.DCE): - with pytest.raises(expected_exception): + assert proj.import_zip(asynchronous=True) == "FAILED/PROJECT_NOT_FOUND" + assert proj.import_zip(asynchronous=False) == "FAILED/PROJECT_NOT_FOUND" + else: + with pytest.raises(exceptions.UnsupportedOperation): proj.import_zip(asynchronous=True) - with pytest.raises(expected_exception): + with pytest.raises(exceptions.UnsupportedOperation): proj.import_zip(asynchronous=False) @@ -241,7 +272,8 @@ def test_set_links(get_test_project: Generator[projects.Project]) -> None: proj = get_test_project proj.set_links({"links": [{"type": "custom", "name": "google", "url": "https://google.com"}]}) proj.key = tutil.NON_EXISTING_KEY - assert not proj.set_links({"links": [{"type": "custom", "name": "yahoo", "url": "https://yahoo.com"}]}) + with pytest.raises(exceptions.ObjectNotFound): + proj.set_links({"links": [{"type": "custom", "name": "yahoo", "url": "https://yahoo.com"}]}) def test_set_tags(get_test_project: Generator[projects.Project]) -> None: @@ -263,37 +295,43 @@ def test_set_quality_gate(get_test_project: Generator[projects.Project], get_tes qg = get_test_quality_gate assert proj.set_quality_gate(qg.name) assert not proj.set_quality_gate(None) - assert not proj.set_quality_gate(tutil.NON_EXISTING_KEY) + with pytest.raises(exceptions.ObjectNotFound): + proj.set_quality_gate(tutil.NON_EXISTING_KEY) proj.key = tutil.NON_EXISTING_KEY - assert not proj.set_quality_gate(qg.name) + with pytest.raises(exceptions.ObjectNotFound): + assert not proj.set_quality_gate(qg.name) def test_ai_code_assurance(get_test_project: Generator[projects.Project]) -> None: """test_ai_code_assurance""" proj = get_test_project - if tutil.SQ.version() >= (10, 7, 0) and tutil.SQ.edition() != c.CE: - proj = get_test_project - assert proj.set_contains_ai_code(True) - assert proj.get_ai_code_assurance() in ( - "CONTAINS_AI_CODE", - "AI_CODE_ASSURED", - "AI_CODE_ASSURANCE_ON", - 
"AI_CODE_ASSURANCE_OFF", - "AI_CODE_ASSURANCE_PASS", - "AI_CODE_ASSURANCE_FAIL", - "NONE", - ) - assert proj.set_contains_ai_code(False) - assert proj.get_ai_code_assurance() == "NONE" - proj.key = tutil.NON_EXISTING_KEY - assert not proj.set_contains_ai_code(True) - assert proj.get_ai_code_assurance() is None - assert not proj.set_contains_ai_code(False) - assert proj.get_ai_code_assurance() is None - else: + if tutil.SQ.version() < (10, 7, 0) or tutil.SQ.edition() == c.CE: with pytest.raises(exceptions.UnsupportedOperation): proj.get_ai_code_assurance() + return + proj = get_test_project + assert proj.set_contains_ai_code(True) + assert proj.get_ai_code_assurance() in ( + "CONTAINS_AI_CODE", + "AI_CODE_ASSURED", + "AI_CODE_ASSURANCE_ON", + "AI_CODE_ASSURANCE_OFF", + "AI_CODE_ASSURANCE_PASS", + "AI_CODE_ASSURANCE_FAIL", + "NONE", + ) + assert proj.set_contains_ai_code(False) + assert proj.get_ai_code_assurance() == "NONE" + proj.key = tutil.NON_EXISTING_KEY + with pytest.raises(exceptions.ObjectNotFound): + proj.set_contains_ai_code(True) + with pytest.raises(exceptions.ObjectNotFound): + assert proj.get_ai_code_assurance() + with pytest.raises(exceptions.ObjectNotFound): + proj.set_contains_ai_code(False) + with pytest.raises(exceptions.ObjectNotFound): + proj.get_ai_code_assurance() def test_set_quality_profile(get_test_project: Generator[projects.Project], get_test_qp: Generator[qualityprofiles.QualityProfile]) -> None: @@ -331,8 +369,10 @@ def test_wrong_key_2(get_test_project: Generator[projects.Project]) -> None: """test_wrong_key""" proj = get_test_project proj.key = tutil.NON_EXISTING_KEY - assert proj.webhooks() is None - assert proj.links() is None + with pytest.raises(exceptions.ObjectNotFound): + _ = proj.webhooks() + with pytest.raises(exceptions.ObjectNotFound): + _ = proj.links() # assert proj.quality_gate() is None with pytest.raises(exceptions.ObjectNotFound): proj.audit({}) diff --git a/test/unit/test_qg.py b/test/unit/test_qg.py index 
8f7677810..145b52b7e 100644 --- a/test/unit/test_qg.py +++ b/test/unit/test_qg.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" quality gates tests """ +"""quality gates tests""" from collections.abc import Generator import json @@ -27,6 +27,7 @@ import utilities as tutil from sonar import qualitygates, exceptions, logging +from sonar.util import constants as c def test_get_object(get_loaded_qg: Generator[qualitygates.QualityGate]) -> None: @@ -192,18 +193,20 @@ def test_import_config() -> None: qg.delete() except exceptions.ObjectNotFound: pass - conf = { - "qualityGates": { - "TEMP GATE": { - "conditions": [ - "new_duplicated_lines_density >= 3%", - "new_software_quality_maintainability_rating >= A", - "new_software_quality_reliability_issues >= 0", - "new_software_quality_security_issues >= 0", - ] - } - } - } + conds = ["new_duplicated_lines_density >= 3%"] + if tutil.SQ.version() < c.MQR_INTRO_VERSION: + conds += [ + "new_bugs >= 0", + "new_vulnerabilities >= 0", + "new_maintainability_rating >= A", + ] + else: + conds += [ + "new_software_quality_maintainability_rating >= A", + "new_software_quality_reliability_issues >= 0", + "new_software_quality_security_issues >= 0", + ] + conf = {"qualityGates": {"TEMP GATE": {"conditions": conds}}} assert qualitygates.import_config(tutil.SQ, conf) qg = qualitygates.QualityGate.get_object(tutil.SQ, "TEMP GATE") assert len(qg.conditions()) == 4 diff --git a/test/unit/test_qp.py b/test/unit/test_qp.py index da7e7bef0..55d8cf9ed 100644 --- a/test/unit/test_qp.py +++ b/test/unit/test_qp.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" quality profiles tests """ +"""quality profiles tests""" from collections.abc import Generator import json @@ -159,7 +159,7 @@ def test_import() -> None: languages.Language.CACHE.clear() qualityprofiles.QualityProfile.CACHE.clear() # delete all quality profiles in test - _ = [qp.set_as_default() for qp in qualityprofiles.get_list(tutil.TEST_SQ).values() if qp.name == tutil.SONAR_WAY] + _ = [qp.set_as_default() for qp in qualityprofiles.get_list(tutil.TEST_SQ, use_cache=False).values() if qp.name == tutil.SONAR_WAY] qp_list = {o for o in qualityprofiles.get_list(tutil.TEST_SQ, use_cache=False).values() if not o.is_built_in and not o.is_default} _ = [o.delete() for o in qp_list] with open(f"{tutil.FILES_ROOT}/config.json", "r", encoding="utf-8") as f: diff --git a/test/unit/test_rules.py b/test/unit/test_rules.py index 9609406d7..617723afb 100644 --- a/test/unit/test_rules.py +++ b/test/unit/test_rules.py @@ -21,7 +21,7 @@ """ - sonar-rules tests +sonar-rules tests """ from collections.abc import Generator @@ -70,7 +70,7 @@ def test_rules_misspelled_language_2(csv_file: Generator[str]) -> None: def test_get_rule() -> None: """test_get_rule""" - myrule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + myrule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert str(myrule) == "rule key 'java:S127'" myrule = rules.Rule.load(endpoint=tutil.SQ, key="java:S127", data={}) assert str(myrule) == "rule key 'java:S127'" @@ -78,7 +78,7 @@ def test_get_rule() -> None: def test_set_tags() -> None: """test_set_tags""" - my_rule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + my_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert my_rule.set_tags(tutil.TAGS) assert my_rule.tags == sorted(tutil.TAGS) assert my_rule.reset_tags() @@ -87,7 +87,7 @@ def test_set_tags() -> None: def test_set_desc() -> None: """test_set_tags""" - my_rule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + my_rule = 
rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert my_rule.set_description("Blah blah") assert my_rule.custom_desc == "Blah blah" assert my_rule.reset_description() @@ -109,7 +109,7 @@ def test_facets() -> None: def test_get_rule_cache() -> None: """test_get_rule_cache""" - my_rule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + my_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert str(my_rule) == "rule key 'java:S127'" new_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") assert my_rule is new_rule @@ -143,7 +143,7 @@ def test_export_all() -> None: def test_new_taxo() -> None: """test_new_taxo""" - my_rule = rules.get_object(endpoint=tutil.SQ, key="java:S127") + my_rule = rules.Rule.get_object(endpoint=tutil.SQ, key="java:S127") if tutil.SQ.version() >= c.MQR_INTRO_VERSION: for qual, sev in my_rule.impacts().items(): assert qual in idefs.MQR_QUALITIES diff --git a/test/unit/test_settings.py b/test/unit/test_settings.py new file mode 100644 index 000000000..cf77f6f56 --- /dev/null +++ b/test/unit/test_settings.py @@ -0,0 +1,83 @@ +# sonar-tools tests +# Copyright (C) 2025 Olivier Korach +# mailto:olivier.korach AT gmail DOT com +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 3 of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with this program; if not, write to the Free Software Foundation, +# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+# + +"""Test of settings""" + +import utilities as tutil +from sonar import settings + + +def test_set_standard() -> None: + """test_set_standard""" + + o = settings.get_object(tutil.SQ, "sonar.java.file.suffixes") + val = o.value + new_val = [".jav", ".java", ".javacard"] + assert o.set(new_val) + assert sorted(o.value) == sorted(new_val) + + new_val = [".jav", ".java", ".javacard", ".jah"] + assert o.set(", ".join(new_val)) + assert sorted(o.value) == sorted(new_val) + + assert o.reset() + assert sorted(o.value) == sorted([".jav", ".java"]) + assert o.set(val) + assert sorted(o.value) == sorted(val) + + +def test_autodetect_ai() -> None: + """test_autodetect_ai""" + + o = settings.get_object(tutil.SQ, "sonar.autodetect.ai.code") + if tutil.SQ.version() < (2025, 1, 0): + assert o is None + return + + val = o.value + assert o.set(True) + assert o.value + assert o.set(False) + assert not o.value + assert o.set(val) + + +def test_mqr_mode() -> None: + """test_mqr_mode""" + o = settings.get_object(tutil.SQ, "sonar.multi-quality-mode.enabled") + if tutil.SQ.version() < (25, 0, 0): + assert o is None + return + val = o.value + assert o.set(True) + assert o.value + assert o.set(False) + assert not o.value + assert o.set(val) + + +def test_unsettable() -> None: + """test_unsettable""" + o = settings.get_object(tutil.SQ, "sonar.core.startTime") + assert o is not None + assert not o.set("2025-01-01") + o = settings.get_object(tutil.SQ, "sonar.auth.github.apiUrl") + assert o is not None + res = True if tutil.SQ.version() < (10, 0, 0) else False + assert o.set("https://api.github.com/") == res diff --git a/test/unit/test_sqobject.py b/test/unit/test_sqobject.py index 334356cf8..5647bc514 100644 --- a/test/unit/test_sqobject.py +++ b/test/unit/test_sqobject.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" projects tests """ +"""projects tests""" import pytest diff --git a/test/unit/test_tasks.py b/test/unit/test_tasks.py index 7a49bcdaa..933d72c8c 100644 --- a/test/unit/test_tasks.py +++ b/test/unit/test_tasks.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" Test of the tasks module and class """ +"""Test of the tasks module and class""" import utilities as tutil from sonar import tasks @@ -40,7 +40,7 @@ def test_task() -> None: assert task.status() == tasks.SUCCESS assert 100 <= task.execution_time() <= 100000 assert task.submitter() == "admin" - assert task.warning_count() > 0 + assert task.warning_count() == 0 assert task.error_message() is None diff --git a/test/unit/test_update_center.py b/test/unit/test_update_center.py index eba89aa95..0f674e6de 100644 --- a/test/unit/test_update_center.py +++ b/test/unit/test_update_center.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" update_center tests """ +"""update_center tests""" import datetime import sonar.util.update_center as uc diff --git a/test/unit/test_users.py b/test/unit/test_users.py index 930b747b2..f7e66444f 100644 --- a/test/unit/test_users.py +++ b/test/unit/test_users.py @@ -19,7 +19,7 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # -""" users tests """ +"""users tests""" from collections.abc import Generator from datetime import datetime diff --git a/test/unit/test_webhooks.py b/test/unit/test_webhooks.py index b26978d9e..2de5dd7d6 100644 --- a/test/unit/test_webhooks.py +++ b/test/unit/test_webhooks.py @@ -19,9 +19,8 @@ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
# -""" users tests """ +"""Webhooks tests""" -from collections.abc import Generator import pytest import utilities as tutil @@ -34,38 +33,45 @@ def test_get_object() -> None: """Test get_object and verify that if requested twice the same object is returned""" - webhook = wh.get_object(endpoint=tutil.SQ, name=WEBHOOK) + webhook = wh.WebHook.get_object(tutil.SQ, WEBHOOK) assert webhook.name == WEBHOOK assert str(webhook) == f"webhook '{WEBHOOK}'" assert webhook.url() == f"{tutil.SQ.external_url}/admin/webhooks" - webhook2 = wh.get_object(endpoint=tutil.SQ, name=WEBHOOK) + webhook2 = wh.WebHook.get_object(endpoint=tutil.SQ, name=WEBHOOK) assert webhook2 is webhook + + with pytest.raises(exceptions.ObjectNotFound) as e: + _ = wh.WebHook.get_object(endpoint=tutil.SQ, name=tutil.NON_EXISTING_KEY) + assert str(e.value).endswith(f"Webhook '{tutil.NON_EXISTING_KEY}' of project 'None' not found") + with pytest.raises(exceptions.ObjectNotFound) as e: + _ = wh.WebHook.get_object(endpoint=tutil.SQ, name=tutil.NON_EXISTING_KEY, project_key=tutil.LIVE_PROJECT) + assert str(e.value).endswith(f"Webhook '{tutil.NON_EXISTING_KEY}' of project '{tutil.LIVE_PROJECT}' not found") with pytest.raises(exceptions.ObjectNotFound) as e: - _ = wh.get_object(endpoint=tutil.SQ, name=tutil.NON_EXISTING_KEY) - assert str(e.value).endswith(f"Webhook '{tutil.NON_EXISTING_KEY}' not found") + _ = wh.WebHook.get_object(endpoint=tutil.SQ, name=WEBHOOK, project_key=tutil.LIVE_PROJECT) + assert str(e.value).endswith(f"Webhook '{WEBHOOK}' of project '{tutil.LIVE_PROJECT}' not found") def test_audit() -> None: """test_audit""" - webhook = wh.get_object(endpoint=tutil.SQ, name=WEBHOOK) + webhook = wh.WebHook.get_object(tutil.SQ, WEBHOOK) pbs = webhook.audit() assert len(pbs) == 1 assert pbs[0].rule_id == audit_rules.RuleId.FAILED_WEBHOOK - pbs = wh.audit(tutil.SQ, {"audit.webhooks": True}) + pbs = wh.audit(tutil.SQ) assert len(pbs) == 1 assert pbs[0].rule_id == audit_rules.RuleId.FAILED_WEBHOOK def 
test_update() -> None: """test_update""" - webhook = wh.get_object(endpoint=tutil.SQ, name=WEBHOOK) + webhook = wh.WebHook.get_object(tutil.SQ, WEBHOOK) old_url = webhook.webhook_url - new_url = "http://my.jenkins.server/sonar-webhook/" + new_url = "https://my.jenkins.server/sonar-webhook/" webhook.update(url=new_url) - webhook = wh.get_object(endpoint=tutil.SQ, name=WEBHOOK) + webhook = wh.WebHook.get_object(tutil.SQ, WEBHOOK) assert webhook.webhook_url == new_url webhook.update(url_target=old_url) - webhook = wh.get_object(endpoint=tutil.SQ, name=WEBHOOK) + webhook = wh.WebHook.get_object(tutil.SQ, WEBHOOK) assert webhook.webhook_url == old_url @@ -73,5 +79,24 @@ def test_export() -> None: """test_export""" exp = wh.export(tutil.SQ) assert len(exp) == 1 - first = list(exp.key())[0] + first = list(exp.keys())[0] assert exp[first]["url"].startswith("https://") + + +def test_create_delete() -> None: + """test_create_delete""" + if tutil.SQ.version() >= (10, 0, 0): + with pytest.raises(exceptions.SonarException): + # Secret too short + wh.WebHook.create(tutil.SQ, tutil.TEMP_KEY, "http://google.com", "Shhht", tutil.PROJECT_1) + hook = wh.WebHook.create(tutil.SQ, tutil.TEMP_KEY, "http://google.com", "Shhht012345678910", tutil.PROJECT_1) + assert hook.name == tutil.TEMP_KEY + assert hook.webhook_url == "http://google.com" + assert hook.secret == "Shhht012345678910" + assert hook.project == tutil.PROJECT_1 + + hook.refresh() + hook.delete() + if tutil.SQ.version() >= (10, 0, 0): + with pytest.raises(exceptions.ObjectNotFound): + hook.refresh() diff --git a/test/unit/utilities.py b/test/unit/utilities.py index d4728855f..984042331 100644 --- a/test/unit/utilities.py +++ b/test/unit/utilities.py @@ -20,7 +20,7 @@ # """ - test utilities +test utilities """ import os