
Commit 507b3e9

Merge branch 'branch-3.16.x' into fix-2039
2 parents: 1f34e99 + 4651e56


176 files changed (+15046 additions, -6289 deletions)


.github/workflows/build.yml

Lines changed: 8 additions & 3 deletions
@@ -28,8 +28,12 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
-          if [ -f requirements-to-build.txt ]; then pip install -r requirements-to-build.txt; fi
+          python -m pip install poetry
+          poetry install
+      - name: Build package
+        run: |
+          poetry build
+        continue-on-error: false
       # Linting is done in the run_linters.sh script

       - name: Prep tests
@@ -49,9 +53,10 @@ jobs:
       - name: Run linters
         working-directory: .
         run: |
+          python -m pip install ruff pylint flake8
           chmod +x conf/run_linters.sh
           conf/run_linters.sh
-      #- name: Cache SonarQube packages
+      # - name: Cache SonarQube packages
       #   uses: actions/cache@v4
       #   with:
       #     path: ./.sonar

.vscode/settings.json

Lines changed: 6 additions & 1 deletion
@@ -6,5 +6,10 @@
     "sonarlint.focusOnNewCode": true,
     "pylint.args": [
         "[\"--rcfile=conf/pylintrc\"]"
-    ]
+    ],
+    "python.testing.pytestArgs": [
+        "test"
+    ],
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
 }

README.md

Lines changed: 38 additions & 291 deletions
Large diffs are not rendered by default.

cli/audit.py

Lines changed: 52 additions & 20 deletions
@@ -18,15 +18,14 @@
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 #
-"""
+"""Audits a SonarQube platform"""

-Audits a SonarQube platform
+from __future__ import annotations

-"""
-import sys
 import json
 import csv
-from typing import TextIO
+import re
+from typing import TextIO, Optional
 from threading import Thread
 from queue import Queue
 from requests import RequestException
@@ -52,12 +51,14 @@
     options.WHAT_PORTFOLIOS: portfolios.audit,
 }

+PROBLEM_KEYS = "problems"
+

 def _audit_sif(sysinfo: str, audit_settings: types.ConfigSettings) -> tuple[str, list[problem.Problem]]:
     """Audits a SIF and return found problems"""
     log.info("Auditing SIF file '%s'", sysinfo)
     try:
-        with open(sysinfo, "r", encoding="utf-8") as f:
+        with open(sysinfo, encoding="utf-8") as f:
             sysinfo = json.loads(f.read())
     except json.decoder.JSONDecodeError:
         log.critical("File %s does not seem to be a legit JSON file", sysinfo)
@@ -72,34 +73,48 @@ def _audit_sif(sysinfo: str, audit_settings: types.ConfigSettings) -> tuple[str,
     return sif_obj.server_id(), sif_obj.audit(audit_settings)


+def __filter_problems(problems: list[problem.Problem], settings: types.ConfigSettings) -> list[problem.Problem]:
+    """Filters audit problems by severity and/or type and/or problem key"""
+    if settings.get(options.SEVERITIES, None):
+        log.debug("Filtering audit problems with severities: %s", settings[options.SEVERITIES])
+        problems = [p for p in problems if str(p.severity) in settings[options.SEVERITIES]]
+    if settings.get(options.TYPES, None):
+        log.debug("Filtering audit problems with types: %s", settings[options.TYPES])
+        problems = [p for p in problems if str(p.type) in settings[options.TYPES]]
+    if settings.get(PROBLEM_KEYS, None):
+        log.debug("Filtering audit problems with keys: %s", settings[PROBLEM_KEYS])
+        problems = [p for p in problems if re.match(rf"^{settings[PROBLEM_KEYS]}$", str(p.rule_id))]
+    return problems
+
+
 def write_csv(queue: Queue[list[problem.Problem]], fd: TextIO, settings: types.ConfigSettings) -> None:
-    """Writes the CSV file of audit problems"""
+    """Thread callback to write audit problems in a CSV file"""
     server_id = settings.get("SERVER_ID", None)
     with_url = settings.get("WITH_URL", False)
     csvwriter = csv.writer(fd, delimiter=settings.get("CSV_DELIMITER", ","))
     header = ["Server Id"] if server_id else []
-    header += ["Audit Check", "Category", "Severity", "Message"]
+    header += ["Problem", "Type", "Severity", "Message"]
     header += ["URL"] if with_url else []
     csvwriter.writerow(header)
     while (problems := queue.get()) is not util.WRITE_END:
+        problems = __filter_problems(problems, settings)
         for p in problems:
             json_data = p.to_json(with_url)
-            data = [] if not server_id else [server_id]
-            data += list(json_data.values())
+            data = [server_id] if server_id else []
+            data += [json_data[k] for k in ("problem", "type", "severity", "message", "url") if k in json_data]
             csvwriter.writerow(data)
         queue.task_done()
     queue.task_done()


 def write_json(queue: Queue[list[problem.Problem]], fd: TextIO, settings: types.ConfigSettings) -> None:
-    """
-    Thread to write problems in a JSON file
-    """
+    """Thread callback to write problems in a JSON file"""
     server_id = settings.get("SERVER_ID", None)
     with_url = settings.get("WITH_URL", False)
     comma = ""
     print("[", file=fd)
     while (problems := queue.get()) is not util.WRITE_END:
+        problems = __filter_problems(problems, settings)
         for p in problems:
             json_data = p.to_json(with_url)
             if server_id:
@@ -113,7 +128,7 @@ def write_json(queue: Queue[list[problem.Problem]], fd: TextIO, settings: types.


 def _audit_sq(
-    sq: platform.Platform, settings: types.ConfigSettings, what_to_audit: list[str] = None, key_list: types.KeyList = None
+    sq: platform.Platform, settings: types.ConfigSettings, what_to_audit: Optional[list[str]] = None, key_list: Optional[types.KeyList] = None
 ) -> list[problem.Problem]:
     """Audits a SonarQube/Cloud platform"""
     everything = what_to_audit is None
@@ -170,6 +185,24 @@ def __parser_args(desc: str) -> object:
         nargs="*",
         help="Pass audit configuration settings on command line (-D<setting>=<value>)",
     )
+    parser.add_argument(
+        f"--{options.SEVERITIES}",
+        required=False,
+        default=None,
+        help="Report only audit problems with the given severities (comma separate values LOW, MEDIUM, HIGH, CRITICAL)",
+    )
+    parser.add_argument(
+        f"--{options.TYPES}",
+        required=False,
+        default=None,
+        help="Report only audit problems of the given comma separated problem types",
+    )
+    parser.add_argument(
+        f"--{PROBLEM_KEYS}",
+        required=False,
+        default=None,
+        help="Report only audit problems whose type key matches the given regexp",
+    )
     args = options.parse_and_check(parser=parser, logger_name=TOOL_NAME, verify_token=False)
     if args.sif is None and args.config is None:
         util.check_token(args.token)
@@ -178,9 +211,8 @@ def __parser_args(desc: str) -> object:

 def __check_keys_exist(key_regexp: list[str], sq: platform.Platform, what: list[str]) -> None:
     """Checks if project keys exist"""
-    if key_regexp and "projects" in what:
-        if len(component_helper.get_components(sq, "projects", key_regexp)) == 0:
-            raise options.ArgumentsError(f"No projects found with key matching regexp '{key_regexp}'")
+    if key_regexp and "projects" in what and len(component_helper.get_components(sq, "projects", key_regexp)) == 0:
+        raise options.ArgumentsError(f"No projects found with key matching regexp '{key_regexp}'")


 def main() -> None:
@@ -194,14 +226,14 @@ def main() -> None:
             key, value = val[0].split("=", maxsplit=1)
             cli_settings[key] = value
     settings = audit_conf.load(TOOL_NAME, cli_settings)
+    settings |= kwargs
     file = ofile = kwargs.pop(options.REPORT_FILE)
     fmt = util.deduct_format(kwargs[options.FORMAT], ofile)
     settings.update(
         {
             "FILE": file,
             "CSV_DELIMITER": kwargs[options.CSV_SEPARATOR],
             "WITH_URL": kwargs[options.WITH_URL],
-            "threads": kwargs[options.NBR_THREADS],
             "format": fmt,
         }
     )
@@ -213,8 +245,8 @@ def main() -> None:
         file = kwargs["sif"]
         errcode = errcodes.SIF_AUDIT_ERROR
         (settings["SERVER_ID"], problems) = _audit_sif(file, settings)
-        problem.dump_report(problems, file=ofile, server_id=settings["SERVER_ID"], format=fmt)
-
+        problems = __filter_problems(problems, settings)
+        problem.dump_report(problems, file=ofile, server_id=settings["SERVER_ID"], fmt=fmt)
     else:
         sq = platform.Platform(**kwargs)
         sq.verify_connection()
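
The diff above adds a __filter_problems helper plus matching command-line arguments so that audit reports can be restricted by severity, type, or a regular expression on the problem key. Below is a minimal standalone sketch of that filtering idea, using a hypothetical Problem dataclass and plain string setting keys in place of the real problem.Problem class and the options constants; it is an illustration under those assumptions, not the project's code.

# Minimal sketch of audit-problem filtering; Problem and the sample findings are hypothetical.
import re
from dataclasses import dataclass

@dataclass
class Problem:  # hypothetical stand-in for the real problem.Problem
    rule_id: str
    severity: str
    type: str
    message: str

def filter_problems(problems: list[Problem], settings: dict) -> list[Problem]:
    """Keeps only problems matching the requested severities, types and key regexp."""
    if settings.get("severities"):
        problems = [p for p in problems if p.severity in settings["severities"]]
    if settings.get("types"):
        problems = [p for p in problems if p.type in settings["types"]]
    if settings.get("problems"):  # regexp matched against the problem (rule) key
        problems = [p for p in problems if re.match(rf"^{settings['problems']}$", p.rule_id)]
    return problems

findings = [
    Problem("AUDIT_WEB_HEAP", "HIGH", "OPERATIONS", "Web heap is too low"),
    Problem("AUDIT_LAST_ANALYSIS", "LOW", "PERFORMANCE", "Project not analyzed recently"),
]
print(filter_problems(findings, {"severities": ["HIGH", "CRITICAL"]}))  # keeps only the first problem

On the command line this presumably surfaces as --severities, --types and --problems flags; the exact names of the first two come from the options constants, while the third follows from the new PROBLEM_KEYS = "problems" value.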

cli/config.py

Lines changed: 31 additions & 20 deletions
@@ -19,9 +19,10 @@
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 #
 """
-Exports SonarQube platform configuration as JSON
+Exports SonarQube platform configuration as JSON
 """
-from typing import TextIO, Optional
+
+from typing import TextIO, Any
 from threading import Thread
 from queue import Queue

@@ -121,16 +122,20 @@ def __parse_args(desc: str) -> object:
     return options.parse_and_check(parser=parser, logger_name=TOOL_NAME)


-def __normalize_json(json_data: dict[str, any], remove_empty: bool = True, remove_none: bool = True) -> dict[str, any]:
+def __normalize_json(json_data: dict[str, Any], remove_empty: bool = True, remove_none: bool = True) -> dict[str, any]:
     """Sorts a JSON file and optionally remove empty and none values"""
     log.info("Normalizing JSON - remove empty = %s, remove nones = %s", str(remove_empty), str(remove_none))
-    if remove_empty:
-        json_data = utilities.remove_empties(json_data)
-    if remove_none:
-        json_data = utilities.remove_nones(json_data)
+    json_data = utilities.clean_data(json_data, remove_empty, remove_none)
     json_data = utilities.order_keys(json_data, *_SECTIONS_ORDER)
     for key in [k for k in _SECTIONS_TO_SORT if k in json_data]:
-        json_data[key] = {k: json_data[key][k] for k in sorted(json_data[key])}
+        if isinstance(json_data[key], (list, tuple, set)):
+            if len(json_data[key]) > 0:
+                sort_field = next((k for k in ("key", "name", "login") if k in json_data[key][0]), None)
+                if sort_field:
+                    tmp_d = {v[sort_field]: v for v in json_data[key]}
+                    json_data[key] = list(dict(sorted(tmp_d.items())).values())
+        else:
+            json_data[key] = {k: json_data[key][k] for k in sorted(json_data[key])}
     return json_data


@@ -170,28 +175,37 @@ def write_objects(queue: Queue[types.ObjectJsonRepr], fd: TextIO, object_type: s
     """
     done = False
     prefix = ""
+    objects_exported_as_lists = ("projects", "applications", "users", "portfolios")
+    objects_exported_as_whole = ("qualityGates", "groups")
     log.info("Waiting %s to write...", object_type)
-    print(f'"{object_type}": ' + "{", file=fd)
+    if object_type in objects_exported_as_lists:
+        start, stop = ("[", "]")
+    elif object_type in objects_exported_as_whole:
+        start, stop = ("", "")
+    else:
+        start, stop = ("{", "}")
+    print(f'"{object_type}": ' + start, file=fd)
     while not done:
         obj_json = queue.get()
         if not (done := obj_json is utilities.WRITE_END):
             if object_type == "groups":
                 obj_json = __prep_json_for_write(obj_json, {**export_settings, EXPORT_EMPTY: True})
             else:
                 obj_json = __prep_json_for_write(obj_json, export_settings)
-            if object_type in ("projects", "applications", "portfolios", "users"):
-                if object_type == "users":
-                    key = obj_json.pop("login", None)
-                else:
-                    key = obj_json.pop("key", None)
-                log.debug("Writing %s key '%s'", object_type[:-1], key)
+            key = "" if isinstance(obj_json, list) else obj_json.get("key", obj_json.get("login", obj_json.get("name", "unknown")))
+            log.debug("Writing %s key '%s'", object_type[:-1], key)
+            if object_type in objects_exported_as_lists:
+                print(f"{prefix}{utilities.json_dump(obj_json)}", end="", file=fd)
+            elif object_type in objects_exported_as_whole:
+                print(f"{prefix}{utilities.json_dump(obj_json)}", end="", file=fd)
+            elif object_type in ("applications", "portfolios", "users"):
                 print(f'{prefix}"{key}": {utilities.json_dump(obj_json)}', end="", file=fd)
             else:
                 log.debug("Writing %s", object_type)
                 print(f"{prefix}{utilities.json_dump(obj_json)[2:-1]}", end="", file=fd)
             prefix = ",\n"
         queue.task_done()
-    print("\n}", file=fd, end="")
+    print("\n" + stop, file=fd, end="")
     log.info("Writing %s complete", object_type)


@@ -256,10 +270,7 @@ def __prep_json_for_write(json_data: types.ObjectJsonRepr, export_settings: type
     if export_settings.get("MODE", "CONFIG") == "MIGRATION":
         return json_data
     if not export_settings.get("FULL_EXPORT", False):
-        json_data = utilities.remove_nones(json_data)
-        if not export_settings.get(EXPORT_EMPTY, False):
-            log.debug("Removing empties")
-            json_data = utilities.remove_empties(json_data)
+        json_data = utilities.clean_data(json_data, remove_empty=not export_settings.get(EXPORT_EMPTY, False), remove_none=True)
     if export_settings.get("INLINE_LISTS", True):
         json_data = utilities.inline_lists(json_data, exceptions=("conditions",))
     return json_data
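
The reworked __normalize_json above now handles sections exported as lists (projects, users, portfolios, ...) by sorting them on the first identifying field found among "key", "name" and "login", instead of assuming every section is a dict. Below is a small self-contained sketch of just that sorting step, with a made-up users list as input; it is a simplification, not the project's exact code.

# Sketch of the list-sorting step: order exported objects by the first
# identifying field present ("key", "name" or "login").
def sort_exported_list(items: list[dict]) -> list[dict]:
    if not items:
        return items
    sort_field = next((k for k in ("key", "name", "login") if k in items[0]), None)
    if sort_field is None:
        return items  # no recognizable identifier, leave the order unchanged
    return sorted(items, key=lambda item: item[sort_field])

users = [{"login": "zoe"}, {"login": "alice"}, {"login": "bob"}]
print(sort_exported_list(users))  # [{'login': 'alice'}, {'login': 'bob'}, {'login': 'zoe'}]

The actual implementation in the diff builds an intermediate dict keyed on that field before sorting, which additionally collapses entries sharing the same key into one.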

cli/cust_measures.py

Lines changed: 3 additions & 3 deletions
@@ -19,10 +19,10 @@
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 #
 """
-This script manipulates custom measures. You may:
+This script manipulates custom measures. You may:

-Update a custom measure value:
-Usage: cust_measures.py -t <SQ_TOKEN> -u <SQ_URL> -k <projectKey> -m <metricKey> --updateValue <value>
+Update a custom measure value:
+Usage: cust_measures.py -t <SQ_TOKEN> -u <SQ_URL> -k <projectKey> -m <metricKey> --updateValue <value>
 """

 import sys

cli/findings_export.py

Lines changed: 9 additions & 3 deletions
@@ -19,9 +19,9 @@
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 #
 """
-This script exports findings as CSV, JSON, or SARIF
+This script exports findings as CSV, JSON, or SARIF

-Usage: sonar-findings-export.py -t <SQ_TOKEN> -u <SQ_URL> [<filters>]
+Usage: sonar-findings-export.py -t <SQ_TOKEN> -u <SQ_URL> [<filters>]

 """

@@ -125,7 +125,7 @@ def parse_args(desc: str) -> Namespace:
     parser.add_argument(
         f"--{options.SEVERITIES}",
         required=False,
-        help="Comma separated severities among" + util.list_to_csv(idefs.STD_SEVERITIES + hotspots.SEVERITIES),
+        help="Comma separated severities among " + util.list_to_csv(idefs.STD_SEVERITIES + hotspots.SEVERITIES),
     )
     parser.add_argument(
         f"--{options.TYPES}",
@@ -373,6 +373,12 @@ def main() -> None:
         key_regexp=params.get(options.KEY_REGEXP, None),
         branch_regexp=branch_regexp,
     )
+    if params[options.COMPONENT_TYPE] == "portfolios":
+        components = []
+        for comp in components_list:
+            components += comp.components()
+        components_list = components
+
     if len(components_list) == 0:
         br = f"and branch matching regexp '{params[options.BRANCH_REGEXP]}'" if options.BRANCH_REGEXP in params else ""
         raise exceptions.SonarException(
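
The block added to main() above expands a portfolio selection into the components it aggregates before findings are exported, so findings are then fetched per component rather than per portfolio. A rough standalone equivalent follows, assuming a hypothetical Portfolio object exposing a components() method as in the diff; it is a sketch, not the project's code.

# Sketch of the portfolio expansion step; the Portfolio class and its contents are made up.
class Portfolio:
    def __init__(self, key: str, components: list[str]):
        self.key = key
        self._components = components

    def components(self) -> list[str]:
        """Returns the components (e.g. projects) aggregated by this portfolio."""
        return self._components

selection = [Portfolio("PORTFOLIO_A", ["proj1", "proj2"]), Portfolio("PORTFOLIO_B", ["proj3"])]

components_list: list[str] = []
for portfolio in selection:
    components_list += portfolio.components()
print(components_list)  # ['proj1', 'proj2', 'proj3']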

cli/findings_sync.py

Lines changed: 6 additions & 6 deletions
@@ -19,13 +19,13 @@
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 #
 """
-This script propagates the manual issue changes (FP, WF, Change
-of severity, of issue type, comments) from:
-- One project to another (normally on different platforms but not necessarily).
-  The 2 platform don't need to be identical in version, edition or plugins
-- One branch of a project to another branch of the same project (normally LLBs)
+This script propagates the manual issue changes (FP, WF, Change
+of severity, of issue type, comments) from:
+- One project to another (normally on different platforms but not necessarily).
+  The 2 platform don't need to be identical in version, edition or plugins
+- One branch of a project to another branch of the same project (normally LLBs)

-Only issues with a 100% match are synchronized. When there's a doubt, nothing is done
+Only issues with a 100% match are synchronized. When there's a doubt, nothing is done
 """

 import datetime
