Skip to content
2 changes: 1 addition & 1 deletion cli/options.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ def parse_and_check(parser: ArgumentParser, logger_name: str = None, verify_toke
log.info("sonar-tools version %s", version.PACKAGE_VERSION)
if os.getenv("IN_DOCKER", "No") == "Yes":
kwargs[URL] = kwargs[URL].replace("http://localhost", "http://host.docker.internal")
if log.get_level() == log.DEBUG:
if log.get_level() <= log.DEBUG:
sanitized_args = kwargs.copy()
sanitized_args[TOKEN] = utilities.redacted_token(sanitized_args[TOKEN])
if "tokenTarget" in sanitized_args:
Expand Down
8 changes: 4 additions & 4 deletions migration/migration.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,7 @@ def write_objects(queue: Queue, fd, object_type: str) -> None:
obj_json = queue.get()
done = obj_json is None
if not done:
obj_json = utilities.remove_nones(obj_json)
if object_type in ("projects", "applications", "portfolios", "users"):
if object_type == "users":
key = obj_json.pop("login", None)
Expand Down Expand Up @@ -143,6 +144,7 @@ def __export_config(endpoint: platform.Platform, what: list[str], **kwargs) -> N
utilities.exit_fatal(f"Project key(s) '{','.join(non_existing_projects)}' do(es) not exist", errcodes.NO_SUCH_KEY)

calls = {
"platform": [__JSON_KEY_PLATFORM, platform.basics],
options.WHAT_SETTINGS: [__JSON_KEY_SETTINGS, platform.export],
options.WHAT_RULES: [__JSON_KEY_RULES, rules.export],
options.WHAT_PROFILES: [__JSON_KEY_PROFILES, qualityprofiles.export],
Expand All @@ -153,10 +155,9 @@ def __export_config(endpoint: platform.Platform, what: list[str], **kwargs) -> N
options.WHAT_USERS: [__JSON_KEY_USERS, users.export],
options.WHAT_GROUPS: [__JSON_KEY_GROUPS, groups.export],
}

what.append("platform")
log.info("Exporting configuration from %s", kwargs[options.URL])
key_list = kwargs[options.KEYS]
sq_settings = {__JSON_KEY_PLATFORM: endpoint.basics()}
is_first = True
q = Queue(maxsize=0)
with utilities.open_file(file, mode="w") as fd:
Expand All @@ -173,11 +174,10 @@ def __export_config(endpoint: platform.Platform, what: list[str], **kwargs) -> N
worker.daemon = True
worker.name = f"Write{ndx[:1].upper()}{ndx[1:10]}"
worker.start()
sq_settings[ndx] = func(endpoint, export_settings=export_settings, key_list=key_list, write_q=q)
func(endpoint, export_settings=export_settings, key_list=key_list, write_q=q)
q.join()
except exceptions.UnsupportedOperation as e:
log.warning(e.message)
sq_settings = utilities.remove_empties(sq_settings)
# if not kwargs.get("dontInlineLists", False):
# sq_settings = utilities.inline_lists(sq_settings, exceptions=("conditions",))
print("\n}", file=fd)
Expand Down
19 changes: 13 additions & 6 deletions sonar/hotspots.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@

from __future__ import annotations

import math
import json
import re
from http import HTTPStatus
Expand Down Expand Up @@ -85,6 +84,12 @@ def __init__(self, nbr_issues, message):


class Hotspot(findings.Finding):
"""Abstraction of the Sonar hotspot concept"""

SEARCH_API = "hotspots/search"
MAX_PAGE_SIZE = 500
MAX_SEARCH = 10000

def __init__(self, endpoint: pf.Platform, key: str, data: types.ApiPayload = None, from_export: bool = False) -> None:
"""Constructor"""
super().__init__(endpoint=endpoint, key=key, data=data, from_export=from_export)
Expand Down Expand Up @@ -385,16 +390,17 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str,
"""
hotspots_list = {}
new_params = sanitize_search_filters(endpoint=endpoint, params=filters)
log.debug("Search hotspots with params %s", str(new_params))
filters_iterations = split_search_filters(new_params)
ps = 500 if "ps" not in new_params else new_params["ps"]
ps = Hotspot.MAX_PAGE_SIZE if "ps" not in new_params else new_params["ps"]
for inline_filters in filters_iterations:
p = 1
inline_filters["ps"] = ps
log.debug("Searching hotspots with sanitized filters %s", str(inline_filters))
while True:
inline_filters["p"] = p
try:
data = json.loads(endpoint.get("hotspots/search", params=inline_filters, mute=(HTTPStatus.NOT_FOUND,)).text)
data = json.loads(endpoint.get(Hotspot.SEARCH_API, params=inline_filters, mute=(HTTPStatus.NOT_FOUND,)).text)
nbr_hotspots = util.nbr_total_elements(data)
except HTTPError as e:
if e.response.status_code == HTTPStatus.NOT_FOUND:
Expand All @@ -404,10 +410,10 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str,
raise e
nbr_pages = util.nbr_pages(data)
log.debug("Number of hotspots: %d - Page: %d/%d", nbr_hotspots, inline_filters["p"], nbr_pages)
if nbr_hotspots > 10000:
if nbr_hotspots > Hotspot.MAX_SEARCH:
raise TooManyHotspotsError(
nbr_hotspots,
f"{nbr_hotspots} hotpots returned by api/hotspots/search, " "this is more than the max 10000 possible",
f"{nbr_hotspots} hotpots returned by api/{Hotspot.SEARCH_API}, this is more than the max {Hotspot.MAX_SEARCH} possible",
)

for i in data["hotspots"]:
Expand Down Expand Up @@ -500,6 +506,7 @@ def count(endpoint: pf.Platform, **kwargs) -> int:
"""Returns number of hotspots of a search"""
params = {} if not kwargs else kwargs.copy()
params["ps"] = 1
nbr_hotspots = len(search(endpoint=endpoint, filters=params))
params = sanitize_search_filters(endpoint, params)
nbr_hotspots = util.nbr_total_elements(json.loads(endpoint.get(Hotspot.SEARCH_API, params=params, mute=(HTTPStatus.NOT_FOUND,)).text))
log.debug("Hotspot counts with filters %s returned %d hotspots", str(kwargs), nbr_hotspots)
return nbr_hotspots
23 changes: 18 additions & 5 deletions sonar/platform.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
import datetime
import json
import tempfile
import logging
import requests
import jprops
from requests.exceptions import HTTPError
Expand Down Expand Up @@ -226,7 +225,7 @@ def __run_request(
if kwargs.get("with_organization", True):
params["organization"] = self.organization
req_type, url = "", ""
if log.get_level() >= logging.DEBUG:
if log.get_level() <= log.DEBUG:
req_type = getattr(request, "__name__", repr(request)).upper()
url = self.__urlstring(api, params)
log.debug("%s: %s", req_type, url)
Expand Down Expand Up @@ -380,14 +379,17 @@ def __urlstring(self, api: str, params: types.ApiParams) -> str:
url_prefix = f"{str(self)}{api}"
if params is None:
return url_prefix
temp_params = params.copy()
for p in params:
if params[p] is None:
continue
sep = "?" if first else "&"
first = False
if isinstance(params[p], datetime.date):
params[p] = util.format_date(params[p])
url_prefix += f"{sep}{p}={requests.utils.quote(str(params[p]))}"
if isinstance(temp_params[p], datetime.date):
temp_params[p] = util.format_date(temp_params[p])
elif isinstance(temp_params[p], (list, tuple, set)):
temp_params[p] = ",".join(temp_params[p])
url_prefix += f"{sep}{p}={requests.utils.quote(str(temp_params[p]))}"
return url_prefix

def webhooks(self) -> dict[str, object]:
Expand Down Expand Up @@ -886,3 +888,14 @@ def export(
write_q.put(exp)
write_q.put(None)
return exp


def basics(
    endpoint: Platform, export_settings: types.ConfigSettings, key_list: Optional[types.KeyList] = None, write_q: Optional[Queue] = None
) -> types.ObjectJsonRepr:
    """Returns an endpoint's basic info (license, edition, version etc...)

    :param endpoint: Reference to the SonarQube platform
    :param export_settings: Unused, present for signature consistency with the other exporters
    :param key_list: Unused, present for signature consistency with the other exporters
    :param write_q: If provided, the exported data is pushed on this queue, followed by a None sentinel
    :return: The platform basic info JSON representation
    """
    exp = endpoint.basics()
    if write_q:
        write_q.put(exp)
        # None is the sentinel telling the consumer (writer) thread that this producer is done
        write_q.put(None)
    return exp
35 changes: 33 additions & 2 deletions sonar/portfolios.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,6 @@
_CREATE_API = "views/create"
_GET_API = "views/show"

MAX_PAGE_SIZE = 500
_PORTFOLIO_QUALIFIER = "VW"
_SUBPORTFOLIO_QUALIFIER = "SVW"

Expand All @@ -74,6 +73,7 @@
"visibility",
"permissions",
"projects",
"projectsList",
"portfolios",
"subPortfolios",
"applications",
Expand All @@ -88,6 +88,8 @@ class Portfolio(aggregations.Aggregation):
SEARCH_API = "views/search"
SEARCH_KEY_FIELD = "key"
SEARCH_RETURN_FIELD = "components"
MAX_PAGE_SIZE = 500
MAX_SEARCH = 10000

_OBJECTS = {}

Expand Down Expand Up @@ -301,7 +303,7 @@ def get_components(self) -> types.ApiPayload:
"component": self.key,
"metricKeys": "ncloc",
"strategy": "children",
"ps": 500,
"ps": Portfolio.MAX_PAGE_SIZE,
},
).text
)
Expand Down Expand Up @@ -359,6 +361,8 @@ def to_json(self, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr
if mode and "none" not in mode:
json_data["projects"] = mode
json_data["applications"] = self._applications
if export_settings.get("MODE", "") == "MIGRATION":
json_data["projectsList"] = self.get_project_list()
return json_data

def export(self, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr:
Expand Down Expand Up @@ -563,6 +567,33 @@ def recompute(self) -> bool:
key = self._root_portfolio.key if self._root_portfolio else self.key
return self.post("views/refresh", params={"key": key}).ok

def get_project_list(self) -> list[str]:
    """Returns the keys of all projects (leaf TRK components) contained in the portfolio

    Pages through api/measures/component_tree, capped at MAX_SEARCH results
    (the loop stops after MAX_SEARCH / MAX_PAGE_SIZE pages).
    :return: List of project keys
    """
    log.debug("Search %s projects list", str(self))
    proj_key_list = []
    page = 0
    # "metricKeys" is required by the measures API even though the measures themselves are unused here
    params = {"component": self.key, "ps": Portfolio.MAX_PAGE_SIZE, "qualifiers": "TRK", "strategy": "leaves", "metricKeys": "ncloc"}
    while True:
        page += 1
        params["p"] = page
        try:
            data = json.loads(self.get("api/measures/component_tree", params=params).text)
            nbr_projects = util.nbr_total_elements(data)
            # Projects referenced by a portfolio are exposed through their "refKey"
            proj_key_list += [c["refKey"] for c in data["components"]]
        except HTTPError as e:
            if e.response.status_code in (HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND):
                log.warning("HTTP Error %s while collecting projects from %s, stopping collection", str(e), str(self))
            else:
                log.critical("HTTP Error %s while collecting projects from %s, proceeding anyway", str(e), str(self))
            # NOTE(review): this break runs for BOTH branches above, so collection also stops in the
            # "proceeding anyway" case - confirm whether non-400/404 errors were meant to keep paging
            break
        nbr_pages = util.nbr_pages(data)
        log.debug("Number of projects: %d - Page: %d/%d", nbr_projects, page, nbr_pages)
        if nbr_projects > Portfolio.MAX_SEARCH:
            # Warn only: the page-count condition below enforces the actual MAX_SEARCH cut-off
            log.warning("Can't collect more than %d projects from %s", Portfolio.MAX_SEARCH, str(self))
        if page >= nbr_pages or page >= Portfolio.MAX_SEARCH / Portfolio.MAX_PAGE_SIZE:
            break
    log.debug("%s projects list = %s", str(self), str(proj_key_list))
    return proj_key_list

def update(self, data: dict[str, str], recurse: bool) -> None:
"""Updates a portfolio with sonar-config JSON data, if recurse is true, this recurses in sub portfolios"""
log.debug("Updating %s with %s", str(self), util.json_dump(data))
Expand Down
3 changes: 2 additions & 1 deletion sonar/projects.py
Original file line number Diff line number Diff line change
Expand Up @@ -1486,7 +1486,8 @@ def __export_thread(queue: Queue[Project], results: dict[str, str], export_setti
with _CLASS_LOCK:
export_settings["EXPORTED"] += 1
nb, tot = export_settings["EXPORTED"], export_settings["NBR_PROJECTS"]
if nb % 10 == 0 or nb == tot:
log.debug("%d/%d projects exported (%d%%)", nb, tot, (nb * 100) // tot)
if nb % 10 == 0 or tot - nb < 10:
log.info("%d/%d projects exported (%d%%)", nb, tot, (nb * 100) // tot)
queue.task_done()

Expand Down
46 changes: 26 additions & 20 deletions sonar/qualityprofiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -550,7 +550,27 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings = None) ->
return problems


def hierarchize(qp_list: dict[str, str], endpoint: pf.Platform) -> types.ObjectJsonRepr:
def hierarchize_language(qp_list: types.ObjectJsonRepr) -> types.ObjectJsonRepr:
    """Organizes a flat list of quality profiles of one language in inheritance hierarchy

    Each profile carrying a "parentName" is moved under its parent's children key;
    profiles without a parent stay at the top level of the returned dict.
    :param qp_list: Flat dict of quality profiles JSON data, indexed by profile name
    :return: The same profiles nested by inheritance
    """
    log.debug("Organizing QP list %s in hierarchy", str(qp_list.keys()))
    # Shallow copy: the per-profile JSON dicts are shared with the input and mutated in place
    hierarchy = qp_list.copy()
    # Removal is deferred because entries can't be popped from the dict while iterating it
    to_remove = []
    for qp_name, qp_json_data in hierarchy.items():
        if "parentName" in qp_json_data:
            if qp_json_data["parentName"] not in hierarchy:
                log.critical("Can't find parent %s in quality profiles", qp_json_data["parentName"])
                continue
            # pop() both reads the parent name and strips it from the child's JSON
            parent_qp = hierarchy[qp_json_data.pop("parentName")]
            if _CHILDREN_KEY not in parent_qp:
                parent_qp[_CHILDREN_KEY] = {}
            parent_qp[_CHILDREN_KEY][qp_name] = qp_json_data
            to_remove.append(qp_name)
    for qp_name in to_remove:
        hierarchy.pop(qp_name)
    return hierarchy


def hierarchize(qp_list: types.ObjectJsonRepr) -> types.ObjectJsonRepr:
"""Organize a flat list of QP in hierarchical (inheritance) fashion

:param qp_list: List of quality profiles
Expand All @@ -559,31 +579,17 @@ def hierarchize(qp_list: dict[str, str], endpoint: pf.Platform) -> types.ObjectJ
:rtype: {<language>: {<qp_name>: {"children": <qp_list>; <qp_data>}}}
"""
log.info("Organizing quality profiles in hierarchy")
for lang, qpl in qp_list.copy().items():
for qp_name, qp_json_data in qpl.copy().items():
log.debug("Treating %s:%s", lang, qp_name)
if "parentName" not in qp_json_data:
continue
parent_qp_name = qp_json_data["parentName"]
qp_json_data.pop("rules", None)
log.debug("QP name '%s:%s' has parent '%s'", lang, qp_name, qp_json_data["parentName"])
if _CHILDREN_KEY not in qp_list[lang][qp_json_data["parentName"]]:
qp_list[lang][qp_json_data["parentName"]][_CHILDREN_KEY] = {}

this_qp = get_object(endpoint=endpoint, name=qp_name, language=lang)
(_, qp_json_data) = this_qp.diff(get_object(endpoint=endpoint, name=parent_qp_name, language=lang), qp_json_data)
qp_list[lang][parent_qp_name][_CHILDREN_KEY][qp_name] = qp_json_data
qp_list[lang].pop(qp_name)
qp_json_data.pop("parentName")
return qp_list
hierarchy = {}
for lang, lang_qp_list in qp_list.items():
hierarchy[lang] = hierarchize_language(lang_qp_list)
return hierarchy


def export(
endpoint: pf.Platform, export_settings: types.ConfigSettings, key_list: Optional[types.KeyList] = None, write_q: Optional[Queue] = None
) -> types.ObjectJsonRepr:
"""Exports all or a list of quality profiles configuration as dict

:param Platform endpoint: reference to the SonarQube platform
:param ConfigSettings export_settings: Export parameters
:param KeyList key_list: Unused
:return: Dict of quality profiles JSON representation
Expand All @@ -599,7 +605,7 @@ def export(
if lang not in qp_list:
qp_list[lang] = {}
qp_list[lang][name] = json_data
qp_list = hierarchize(qp_list, endpoint)
qp_list = hierarchize(qp_list)
if write_q:
write_q.put(qp_list)
write_q.put(None)
Expand Down
62 changes: 62 additions & 0 deletions test/test_migration.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,36 @@ def test_migration() -> None:
with open(file=util.JSON_FILE, mode="r", encoding="utf-8") as fh:
json_config = json.loads(fh.read())

for item in (
"platform",
"globalSettings",
"rules",
"qualityProfiles",
"qualityGates",
"projects",
"applications",
"portfolios",
"users",
"groups",
):
assert item in json_config

for p in json_config["projects"].values():
for item in (
"backgroundTasks",
"branches",
"detectedCi",
"lastAnalysis",
"issues",
"hotspots",
"name",
"ncloc",
"permissions",
"revision",
"visibility",
):
assert item in p

u = json_config["users"]["admin"]
assert "sonar-users" in u["groups"]
assert u["local"] and u["active"]
Expand Down Expand Up @@ -104,3 +134,35 @@ def test_migration() -> None:
assert json_config["projects"]["demo:github-actions-cli"]["detectedCi"] == "Github Actions"

util.clean(util.JSON_FILE)


def test_migration_skip_issues() -> None:
    """Verifies that sonar-migration with --skipIssues still exports all sections except per-project issues and hotspots"""
    util.clean(util.JSON_FILE)
    # The migration tool terminates via sys.exit(), so the exit code is captured through SystemExit
    with pytest.raises(SystemExit) as e:
        with patch.object(sys, "argv", OPTS + ["--skipIssues"]):
            migration.main()
    assert int(str(e.value)) == errcodes.OK
    assert util.file_not_empty(util.JSON_FILE)
    with open(file=util.JSON_FILE, mode="r", encoding="utf-8") as fh:
        json_config = json.loads(fh.read())

    # All top-level sections must still be present in the export
    for item in (
        "platform",
        "globalSettings",
        "rules",
        "qualityProfiles",
        "qualityGates",
        "projects",
        "applications",
        "portfolios",
        "users",
        "groups",
    ):
        assert item in json_config

    # But issues and hotspots must have been skipped for every project
    for p in json_config["projects"].values():
        assert "issues" not in p
        assert "hotspots" not in p

    util.clean(util.JSON_FILE)