Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions sonar/aggregations.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
import sonar.platform as pf

import sonar.components as comp

from sonar import utilities
from sonar.audit.rules import get_rule
from sonar.audit.problem import Problem

Expand Down Expand Up @@ -110,5 +110,4 @@ def count(api: str, endpoint: pf.Platform, params: types.ApiParams = None) -> in
if params is None:
params = {}
params["ps"] = 1
data = json.loads(endpoint.get(api, params=params).text)
return data["paging"]["total"]
return utilities.nbr_total_elements(json.loads(endpoint.get(api, params=params).text))
3 changes: 1 addition & 2 deletions sonar/applications.py
Original file line number Diff line number Diff line change
Expand Up @@ -443,8 +443,7 @@ def count(endpoint: pf.Platform) -> int:
:rtype: int
"""
check_supported(endpoint)
data = json.loads(endpoint.get(APIS["search"], params={"ps": 1, "filter": "qualifier = APP"}).text)
return data["paging"]["total"]
return util.nbr_total_elements(json.loads(endpoint.get(APIS["search"], params={"ps": 1, "filter": "qualifier = APP"}).text))


def check_supported(endpoint: pf.Platform) -> None:
Expand Down
33 changes: 6 additions & 27 deletions sonar/branches.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
from sonar.util import types
import sonar.logging as log
import sonar.sqobject as sq
from sonar import components, syncer, settings, exceptions
from sonar import components, settings, exceptions
from sonar import projects
import sonar.utilities as util

Expand Down Expand Up @@ -220,9 +220,6 @@ def export(self, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr:
:return: The branch new code period definition
:rtype: str
"""
from sonar.issues import count as issue_count
from sonar.hotspots import count as hotspot_count

log.debug("Exporting %s", str(self))
data = {settings.NEW_CODE_PERIOD: self.new_code()}
if self.is_main():
Expand All @@ -234,27 +231,7 @@ def export(self, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr:
if export_settings.get("FULL_EXPORT", True):
data.update({"name": self.name, "project": self.concerned_object.key})
if export_settings.get("MODE", "") == "MIGRATION":
data["lastAnalysis"] = util.date_to_string(self.last_analysis())
lang_distrib = self.get_measure("ncloc_language_distribution")
loc_distrib = {}
if lang_distrib:
loc_distrib = {m.split("=")[0]: int(m.split("=")[1]) for m in lang_distrib.split(";")}
loc_distrib["total"] = self.loc()
data["ncloc"] = loc_distrib
tpissues = self.count_third_party_issues()
params = self.search_params()
data["issues"] = {
"thirdParty": tpissues if len(tpissues) > 0 else 0,
"falsePositives": issue_count(self.endpoint, issueStatuses=["FALSE_POSITIVE"], **params),
}
status = "accepted" if self.endpoint.version() >= (10, 2, 0) else "wontFix"
data["issues"][status] = issue_count(self.endpoint, issueStatuses=[status.upper()], **params)
data["hotspots"] = {
"acknowledged": hotspot_count(self.endpoint, resolution=["ACKNOWLEDGED"], **params),
"safe": hotspot_count(self.endpoint, resolution=["SAFE"], **params),
"fixed": hotspot_count(self.endpoint, resolution=["FIXED"], **params),
}
log.debug("%s has these notable issues %s", str(self), str(data["issues"]))
data.update(self.migration_export())
data = util.remove_nones(data)
return None if len(data) == 0 else data

Expand Down Expand Up @@ -330,17 +307,19 @@ def sync(self, another_branch: Branch, sync_settings: types.ConfigSettings) -> t
:return: sync report as tuple, with counts of successful and unsuccessful issue syncs
:rtype: tuple(report, counters)
"""
from sonar.syncer import sync_lists

report, counters = [], {}
log.info("Syncing %s (%s) and %s (%s) issues", str(self), self.endpoint.url, str(another_branch), another_branch.endpoint.url)
(report, counters) = syncer.sync_lists(
(report, counters) = sync_lists(
list(self.get_issues().values()),
list(another_branch.get_issues().values()),
self,
another_branch,
sync_settings=sync_settings,
)
log.info("Syncing %s (%s) and %s (%s) hotspots", str(self), self.endpoint.url, str(another_branch), another_branch.endpoint.url)
(tmp_report, tmp_counts) = syncer.sync_lists(
(tmp_report, tmp_counts) = sync_lists(
list(self.get_hotspots().values()),
list(another_branch.get_hotspots().values()),
self,
Expand Down
60 changes: 48 additions & 12 deletions sonar/components.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@

"""
from __future__ import annotations
import math
import json

from datetime import datetime
Expand Down Expand Up @@ -104,9 +105,9 @@ def get_subcomponents(self, strategy: str = "children", with_issues: bool = Fals
"metricKeys": "bugs,vulnerabilities,code_smells,security_hotspots",
}
data = json.loads(self.get("measures/component_tree", params=parms).text)
nb_comp = data["paging"]["total"]
nb_comp = utilities.nbr_total_elements(data)
log.debug("Found %d subcomponents to %s", nb_comp, str(self))
nb_pages = (nb_comp + 500 - 1) // 500
nb_pages = math.ceil(nb_comp / 500)
comp_list = {}
parms["ps"] = 500
for page in range(nb_pages):
Expand All @@ -126,29 +127,35 @@ def get_subcomponents(self, strategy: str = "children", with_issues: bool = Fals

def get_issues(self, filters: types.ApiParams = None) -> dict[str, object]:
    """Returns the issues of a component, optionally restricted to branches or/and PRs.

    :param filters: Optional additional issue search filters, defaults to None
    :return: dict of {issue_key: Issue}
    """
    from sonar.issues import search_all

    log.info("Searching issues for %s with filters %s", str(self), str(filters))
    params = self.search_params()
    if filters is not None:
        params.update(filters)
    params["additionalFields"] = "comments"  # also retrieve issue comments in the search
    issue_list = search_all(endpoint=self.endpoint, params=params)
    self.nbr_issues = len(issue_list)  # cache the issue count on the component
    return issue_list

def count_specific_rules_issues(self, ruleset: list[str], filters: types.ApiParams = None) -> dict[str, int]:
    """Returns the count of issues of a component for a given ruleset.

    :param ruleset: Rules to count issues for.
        NOTE(review): items are dereferenced with ``r.key`` below, so this looks like a list
        of rule objects rather than ``list[str]`` — confirm and fix the annotation upstream.
    :param filters: Optional additional issue search filters, defaults to None
    :return: dict of {rule_key: issue_count}, only for rules with at least one issue
    """
    from sonar.issues import count_by_rule

    params = self.search_params()
    if filters is not None:
        params.update(filters)
    params["facets"] = "rules"
    params["rules"] = [r.key for r in ruleset]
    # Drop zero counts so the result only lists rules that actually raised issues
    return {k: v for k, v in count_by_rule(endpoint=self.endpoint, **params).items() if v > 0}

def count_third_party_issues(self, filters: types.ApiParams = None) -> dict[str, int]:
    """Counts the issues of this component that were raised by third-party rules.

    :param filters: Optional additional issue search filters, defaults to None
    :return: dict of {rule_key: issue_count} restricted to third-party rules
    """
    third_party_ruleset = rules.third_party(self.endpoint)
    return self.count_specific_rules_issues(ruleset=third_party_ruleset, filters=filters)

def count_instantiated_rules_issues(self, filters: types.ApiParams = None) -> dict[str, int]:
    """Counts the issues of this component that were raised by instantiated rules.

    :param filters: Optional additional issue search filters, defaults to None
    :return: dict of {rule_key: issue_count} restricted to instantiated rules
    """
    instantiated_ruleset = rules.instantiated(self.endpoint)
    return self.count_specific_rules_issues(ruleset=instantiated_ruleset, filters=filters)

def get_hotspots(self, filters: types.ApiParams = None) -> dict[str, object]:
"""Returns list of hotspots for a component, optionally on branches or/and PRs"""
Expand All @@ -160,6 +167,35 @@ def get_hotspots(self, filters: types.ApiParams = None) -> dict[str, object]:
params.update(filters)
return search(endpoint=self.endpoint, filters=params)

def migration_export(self) -> dict[str, any]:
    """Exports the component data needed for a migration: last analysis date,
    LoC breakdown per language, and counts of notable issues and hotspots."""
    from sonar.issues import count as issue_count
    from sonar.hotspots import count as hotspot_count

    data = {"lastAnalysis": utilities.date_to_string(self.last_analysis())}
    # LoC split per language (measure format is e.g. "java=1000;xml=200"), plus overall total
    ncloc_split = {}
    lang_distribution = self.get_measure("ncloc_language_distribution")
    if lang_distribution:
        for item in lang_distribution.split(";"):
            fields = item.split("=")
            ncloc_split[fields[0]] = int(fields[1])
    ncloc_split["total"] = self.loc()
    data["ncloc"] = ncloc_split
    third_party = self.count_third_party_issues()
    instantiated = self.count_instantiated_rules_issues()
    params = self.search_params()
    issues_data = {
        "thirdParty": third_party if len(third_party) > 0 else 0,
        "instantiatedRules": instantiated if len(instantiated) > 0 else 0,
        "falsePositives": issue_count(self.endpoint, issueStatuses=["FALSE_POSITIVE"], **params),
    }
    # "wontFix" was renamed "accepted" as of SonarQube 10.2
    status = "accepted" if self.endpoint.version() >= (10, 2, 0) else "wontFix"
    issues_data[status] = issue_count(self.endpoint, issueStatuses=[status.upper()], **params)
    data["issues"] = issues_data
    data["hotspots"] = {
        res.lower(): hotspot_count(self.endpoint, resolution=[res], **params)
        for res in ("ACKNOWLEDGED", "SAFE", "FIXED")
    }
    log.debug("%s has these notable issues %s", str(self), str(data["issues"]))
    return data

def get_measures(self, metrics_list: types.KeyList) -> dict[str, any]:
"""Retrieves a project list of measures

Expand Down
17 changes: 9 additions & 8 deletions sonar/hotspots.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

from __future__ import annotations

import math
import json
import re
from http import HTTPStatus
Expand Down Expand Up @@ -71,7 +72,7 @@
SEVERITIES = ()

# Filters for search of hotspots are different than for issues :-(
_FILTERS_HOTSPOTS_REMAPPING = {"resolutions": "resolution", "statuses": "status", "componentsKey": PROJECT_FILTER_OLD, "components": PROJECT_FILTER}
_FILTERS_HOTSPOTS_REMAPPING = {"resolutions": "resolution", "statuses": "status", "componentsKey": PROJECT_FILTER, "components": PROJECT_FILTER}

_OBJECTS = {}

Expand Down Expand Up @@ -384,8 +385,7 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str,
:rtype: dict{<key>: <Hotspot>}
"""
hotspots_list = {}
new_params = get_search_filters(endpoint=endpoint, params=filters)
new_params = util.dict_remap(original_dict=new_params, remapping=_FILTERS_HOTSPOTS_REMAPPING)
new_params = sanitize_search_filters(endpoint=endpoint, params=filters)
filters_iterations = split_search_filters(new_params)
ps = 500 if "ps" not in new_params else new_params["ps"]
for inline_filters in filters_iterations:
Expand All @@ -395,16 +395,15 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str,
while True:
inline_filters["p"] = p
try:
resp = endpoint.get("hotspots/search", params=inline_filters, mute=(HTTPStatus.NOT_FOUND,))
data = json.loads(resp.text)
nbr_hotspots = data["paging"]["total"]
data = json.loads(endpoint.get("hotspots/search", params=inline_filters, mute=(HTTPStatus.NOT_FOUND,)).text)
nbr_hotspots = util.nbr_total_elements(data)
except HTTPError as e:
if e.response.status_code == HTTPStatus.NOT_FOUND:
log.warning("No hotspots found with search params %s", str(inline_filters))
nbr_hotspots = 0
return {}
raise e
nbr_pages = (nbr_hotspots + ps - 1) // ps
nbr_pages = util.nbr_pages(data)
log.debug("Number of hotspots: %d - Page: %d/%d", nbr_hotspots, inline_filters["p"], nbr_pages)
if nbr_hotspots > 10000:
raise TooManyHotspotsError(
Expand Down Expand Up @@ -432,7 +431,7 @@ def get_object(endpoint: pf.Platform, key: str, data: dict[str] = None, from_exp
return _OBJECTS[uid]


def get_search_filters(endpoint: pf.Platform, params: types.ApiParams) -> types.ApiParams:
def sanitize_search_filters(endpoint: pf.Platform, params: types.ApiParams) -> types.ApiParams:
"""Returns the filtered list of params that are allowed for api/hotspots/search"""
log.debug("Sanitizing hotspot search criteria %s", str(params))
if params is None:
Expand All @@ -446,6 +445,8 @@ def get_search_filters(endpoint: pf.Platform, params: types.ApiParams) -> types.
criterias["status"] = "REVIEWED"
if endpoint.version() <= (10, 2, 0):
criterias = util.dict_remap(original_dict=criterias, remapping={PROJECT_FILTER: PROJECT_FILTER_OLD})
else:
criterias = util.dict_remap(original_dict=criterias, remapping={PROJECT_FILTER_OLD: PROJECT_FILTER})
criterias = util.dict_subset(criterias, SEARCH_CRITERIAS)
log.debug("Sanitized hotspot search criteria %s", str(criterias))
return criterias
Expand Down
43 changes: 18 additions & 25 deletions sonar/issues.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

from __future__ import annotations

import math
from datetime import date, datetime, timedelta
import json
import re
Expand Down Expand Up @@ -750,18 +751,13 @@ def search(endpoint: pf.Platform, params: ApiParams = None, raise_error: bool =
:raises: TooManyIssuesError if more than 10'000 issues found
"""
filters = pre_search_filters(endpoint=endpoint, params=params)
# if endpoint.version() >= (10, 2, 0):
# new_params = util.dict_remap_and_stringify(new_params, _FILTERS_10_2_REMAPPING)

log.debug("Search filters = %s", str(filters))
if not filters:
filters = {"ps": Issue.MAX_PAGE_SIZE}
elif "ps" not in filters:
if "ps" not in filters:
filters["ps"] = Issue.MAX_PAGE_SIZE

log.debug("Search filters = %s", str(filters))
issue_list = {}
data = json.loads(endpoint.get(Issue.SEARCH_API, params=filters).text)
nbr_issues = data["paging"]["total"]
nbr_issues = util.nbr_total_elements(data)
nbr_pages = util.nbr_pages(data)
log.debug("Number of issues: %d - Nbr pages: %d", nbr_issues, nbr_pages)

Expand Down Expand Up @@ -822,37 +818,32 @@ def get_newest_issue(endpoint: pf.Platform, params: ApiParams = None) -> Union[d

def count(endpoint: pf.Platform, **kwargs) -> int:
    """Returns the number of issues matching a search.

    :param endpoint: Platform to query
    :param kwargs: Issue search filters
    :return: Total number of issues matching the filters
    """
    filters = pre_search_filters(endpoint=endpoint, params=kwargs)
    filters["ps"] = 1  # only the paging total is needed, request the smallest possible page
    nbr_issues = util.nbr_total_elements(json.loads(endpoint.get(Issue.SEARCH_API, params=filters).text))
    log.debug("Count issues with filters %s returned %d issues", str(kwargs), nbr_issues)
    return nbr_issues


def count_by_rule(endpoint: pf.Platform, **kwargs) -> dict[str, int]:
    """Returns the number of issues per rule key for a search.

    :param endpoint: Platform to query
    :param kwargs: Issue search filters; "rules" holds the rule keys to count issues for
    :return: dict of {rule_key: issue_count} for the requested rules
    """
    SLICE_SIZE = 50  # Query the rules facet by bulks of 50 rules
    # Pop "rules" before sanitizing the remaining filters; an empty ruleset yields no API call
    # (the previous code left "ruleset" unbound in that case, causing a NameError)
    ruleset = list(kwargs.pop("rules", []))
    params = pre_search_filters(endpoint=endpoint, params=kwargs)
    params.update({"ps": 1, "facets": "rules"})
    rulecount = {}
    for i in range(math.ceil(len(ruleset) / SLICE_SIZE)):
        # Slice end is exclusive: (i + 1) * SLICE_SIZE — the previous "- 1" dropped
        # the last rule of every full slice (off-by-one)
        params["rules"] = ",".join(ruleset[i * SLICE_SIZE : (i + 1) * SLICE_SIZE])
        data = json.loads(endpoint.get(Issue.SEARCH_API, params=params).text)["facets"][0]["values"]
        for facet in data:
            # The facet may report rules outside the requested set; ignore them
            if facet["val"] in ruleset:
                rulecount[facet["val"]] = rulecount.get(facet["val"], 0) + facet["count"]
    return rulecount


Expand All @@ -870,7 +861,9 @@ def pre_search_filters(endpoint: pf.Platform, params: ApiParams) -> ApiParams:
return {}
log.debug("Sanitizing issue search filters %s", str(params))
version = endpoint.version()
filters = util.dict_remap(original_dict=params.copy(), remapping={"project": COMPONENT_FILTER})
filters = util.dict_remap(
original_dict=params.copy(), remapping={"project": COMPONENT_FILTER, "application": COMPONENT_FILTER, "portfolio": COMPONENT_FILTER}
)
filters = util.dict_subset(util.remove_nones(filters), _SEARCH_CRITERIAS)
if version < (10, 2, 0):
# Starting from 10.2 - "componentKeys" was renamed "components"
Expand Down
3 changes: 1 addition & 2 deletions sonar/measures.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,8 +83,7 @@ def count_history(self, params: ApiParams = None) -> int:
if params is None:
params = {}
params.update({"component": self.concerned_object.key, "metrics": self.metric, "ps": 1})
data = json.loads(self.get(Measure.API_HISTORY, params=params).text)
return data["paging"]["total"]
return util.nbr_total_elements(json.loads(self.get(Measure.API_HISTORY, params=params).text))

def search_history(self, params: ApiParams = None) -> dict[str, any]:
"""Searches the history of the measure
Expand Down
Loading