Commit 40e5e0c

Fix sonar-config output like before (#1397)
* Remove unwanted properties in config mode
* Add JSON normalization
* Final file normalization
* Extract platform and set CONFIG mode
* Remove unused code
* Sort, remove empty and nones before dumping
* Remove premature removal of nones
* Fix QP diffing in inheritance
* Last fixes
* Reduce complexity
* Quality pass
* Quality pass
* Handle JSON decode error when normalizing JSON file
* Black'ing
1 parent cb3ff51 commit 40e5e0c

5 files changed: +85 -70 lines changed

cli/config.py

Lines changed: 37 additions & 59 deletions
@@ -22,7 +22,7 @@
 Exports SonarQube platform configuration as JSON
 """
 import sys
-import os
+from typing import TextIO
 from threading import Thread, Lock
 from queue import Queue

@@ -78,6 +78,7 @@


 _EXPORT_CALLS = {
+    "platform": [__JSON_KEY_PLATFORM, platform.basics],
     options.WHAT_SETTINGS: [__JSON_KEY_SETTINGS, platform.export],
     options.WHAT_RULES: [__JSON_KEY_RULES, rules.export],
     options.WHAT_PROFILES: [__JSON_KEY_PROFILES, qualityprofiles.export],
@@ -135,36 +136,6 @@ def __write_export(config: dict[str, str], file: str, format: str) -> None:
         print(utilities.json_dump(config), file=fd)


-def __remove_chars_at_end(file: str, nb_bytes: int) -> None:
-    """Writes the configuration in file"""
-    with open(file, mode="rb+") as fd:
-        fd.seek(-nb_bytes, os.SEEK_END)
-        fd.truncate()
-
-
-def __add_project_header(file: str) -> None:
-    """Writes the configuration in file"""
-    with open(file, mode="a", encoding="utf-8") as fd:
-        print(',\n "projects": {\n', file=fd)
-
-
-def __add_project_footer(file: str) -> None:
-    """Closes projects section"""
-    __remove_chars_at_end(file, 2)
-    with open(file, mode="a", encoding="utf-8") as fd:
-        print("\n }\n}", file=fd)
-
-
-def write_project(project_json: dict[str, any], file: str) -> None:
-    """
-    writes a project JSON in a file
-    """
-    key = project_json.pop("key")
-    with _WRITE_LOCK:
-        with utilities.open_file(file, mode="a") as fd:
-            print(f'"{key}": {utilities.json_dump(project_json)},', file=fd)
-
-
 def __convert_for_yaml(json_export: dict[str, any]) -> dict[str, any]:
     """Converts the default JSON produced by export to a modified version more suitable for YAML"""
     if "globalSettings" in json_export:
@@ -203,7 +174,8 @@ def __export_config_sync(endpoint: platform.Platform, what: list[str], **kwargs)
         utilities.exit_fatal(f"Project key(s) '{','.join(non_existing_projects)}' do(es) not exist", errcodes.NO_SUCH_KEY)
     log.info("Exporting configuration synchronously from %s", kwargs[options.URL])
     key_list = kwargs[options.KEYS]
-    sq_settings = {__JSON_KEY_PLATFORM: endpoint.basics()}
+    what.append("platform")
+    sq_settings = {}
     for what_item, call_data in _EXPORT_CALLS.items():
         if what_item not in what:
             continue
@@ -214,38 +186,45 @@ def __export_config_sync(endpoint: platform.Platform, what: list[str], **kwargs)
             log.warning(e.message)
         except exceptions.ObjectNotFound as e:
             log.error(e.message)
-    sq_settings = utilities.remove_empties(sq_settings)
-    if not kwargs["dontInlineLists"]:
-        sq_settings = utilities.inline_lists(sq_settings, exceptions=("conditions",))
+    sq_settings = __prep_json_for_write(sq_settings, export_settings)
     __write_export(sq_settings, kwargs[options.REPORT_FILE], kwargs[options.FORMAT])
     log.info("Synchronous export of configuration from %s completed", kwargs["url"])


-def write_objects(queue: Queue, fd, object_type: str, export_settings: types.ConfigSettings) -> None:
+def __prep_json_for_write(json_data: types.ObjectJsonRepr, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr:
+    """Cleans up the JSON before writing"""
+    json_data = utilities.sort_lists(json_data)
+    if not export_settings.get("FULL_EXPORT", False):
+        json_data = utilities.remove_empties(utilities.remove_nones(json_data))
+    if export_settings.get("INLINE_LISTS", True):
+        json_data = utilities.inline_lists(json_data, exceptions=("conditions",))
+    return json_data
+
+
+def write_objects(queue: Queue[types.ObjectJsonRepr], fd: TextIO, object_type: str, export_settings: types.ConfigSettings) -> None:
     """
     Thread to write projects in the JSON file
     """
-    done = False
     prefix = ""
     log.info("Waiting %s to write...", object_type)
     print(f'"{object_type}": ' + "{", file=fd)
-    while not done:
+    while True:
         obj_json = queue.get()
-        done = obj_json is None
-        if not done:
-            if export_settings.get("INLINE_LISTS", True):
-                obj_json = utilities.inline_lists(obj_json, exceptions=("conditions",))
-            if object_type in ("projects", "applications", "portfolios", "users"):
-                if object_type == "users":
-                    key = obj_json.pop("login", None)
-                else:
-                    key = obj_json.pop("key", None)
-                log.debug("Writing %s key '%s'", object_type[:-1], key)
-                print(f'{prefix}"{key}": {utilities.json_dump(obj_json)}', end="", file=fd)
+        if obj_json is None:
+            queue.task_done()
+            break
+        obj_json = __prep_json_for_write(obj_json, export_settings)
+        if object_type in ("projects", "applications", "portfolios", "users"):
+            if object_type == "users":
+                key = obj_json.pop("login", None)
             else:
-                log.debug("Writing %s", object_type)
-                print(f"{prefix}{utilities.json_dump(obj_json)[2:-1]}", end="", file=fd)
-            prefix = ",\n"
+                key = obj_json.pop("key", None)
+            log.debug("Writing %s key '%s'", object_type[:-1], key)
+            print(f'{prefix}"{key}": {utilities.json_dump(obj_json)}', end="", file=fd)
+        else:
+            log.debug("Writing %s", object_type)
+            print(f"{prefix}{utilities.json_dump(obj_json)[2:-1]}", end="", file=fd)
+        prefix = ",\n"
         queue.task_done()
     print("\n}", file=fd, end="")
     log.info("Writing %s complete", object_type)
@@ -256,10 +235,9 @@ def __export_config_async(endpoint: platform.Platform, what: list[str], **kwargs
     file = kwargs[options.REPORT_FILE]
     export_settings = {
         "INLINE_LISTS": not kwargs["dontInlineLists"],
-        "EXPORT_DEFAULTS": True,
-        # "FULL_EXPORT": kwargs["fullExport"],
-        "FULL_EXPORT": False,
-        "MODE": "MIGRATION",
+        "EXPORT_DEFAULTS": kwargs["exportDefaults"],
+        "FULL_EXPORT": kwargs["fullExport"],
+        "MODE": "CONFIG",
         "THREADS": kwargs[options.NBR_THREADS],
         "SKIP_ISSUES": True,
     }
@@ -270,7 +248,7 @@ def __export_config_async(endpoint: platform.Platform, what: list[str], **kwargs

     log.info("Exporting configuration from %s", kwargs[options.URL])
     key_list = kwargs[options.KEYS]
-    sq_settings = {__JSON_KEY_PLATFORM: endpoint.basics()}
+    what.append("platform")
     is_first = True
     q = Queue(maxsize=0)
     with utilities.open_file(file, mode="w") as fd:
@@ -287,12 +265,12 @@ def __export_config_async(endpoint: platform.Platform, what: list[str], **kwargs
             worker.daemon = True
             worker.name = f"Write{ndx[:1].upper()}{ndx[1:10]}"
             worker.start()
-            sq_settings[ndx] = func(endpoint, export_settings=export_settings, key_list=key_list, write_q=q)
+            func(endpoint, export_settings=export_settings, key_list=key_list, write_q=q)
         q.join()
     except exceptions.UnsupportedOperation as e:
         log.warning(e.message)
-    sq_settings = utilities.remove_empties(sq_settings)
     print("\n}", file=fd)
+    utilities.normalize_json_file(file, remove_empty=True, remove_none=True)
    log.info("Exporting migration data from %s completed", kwargs["url"])
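
The async export path above drops the old per-project write helpers in favour of one writer thread per section: each export function registered in _EXPORT_CALLS pushes object JSON onto a shared Queue and pushes None when finished, and write_objects() streams the section into the report file until it sees that sentinel. Below is a minimal, self-contained sketch of this producer/consumer wiring, not the sonar-tools code itself: writer() mirrors the structure of write_objects() in simplified form, and fake_exporter() is a hypothetical stand-in for the real export functions.

# Minimal sketch of the queue-with-sentinel writer pattern used above.
# fake_exporter() is hypothetical; the real exporters push their JSON the same way.
import json
import sys
from queue import Queue
from threading import Thread


def writer(q: Queue, fd, object_type: str) -> None:
    """Consumes JSON objects from the queue and streams them as one JSON section."""
    prefix = ""
    print(f'"{object_type}": ' + "{", file=fd)
    while True:
        obj = q.get()
        if obj is None:  # sentinel: the producer is done
            q.task_done()
            break
        key = obj.pop("key", None)
        print(f'{prefix}"{key}": {json.dumps(obj)}', end="", file=fd)
        prefix = ",\n"
        q.task_done()
    print("\n}", file=fd)


def fake_exporter(q: Queue) -> None:
    """Hypothetical producer: pushes objects, then the None sentinel."""
    for key in ("projA", "projB"):
        q.put({"key": key, "visibility": "public"})
    q.put(None)


if __name__ == "__main__":
    q = Queue(maxsize=0)
    t = Thread(target=writer, args=(q, sys.stdout, "projects"))
    t.start()
    fake_exporter(q)
    q.join()  # all puts matched by task_done(), sentinel included
    t.join()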

migration/migration.py

Lines changed: 1 addition & 0 deletions
@@ -181,6 +181,7 @@ def __export_config(endpoint: platform.Platform, what: list[str], **kwargs) -> N
         # if not kwargs.get("dontInlineLists", False):
         #     sq_settings = utilities.inline_lists(sq_settings, exceptions=("conditions",))
         print("\n}", file=fd)
+    utilities.normalize_json_file(file, remove_empty=False, remove_none=True)
     log.info("Exporting migration data from %s completed", kwargs["url"])
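
Both export paths now end with a normalization pass over the written file: the config export (cli/config.py above) calls normalize_json_file with remove_empty=True and remove_none=True, while the migration export keeps empty containers and only drops None values. A small illustration of that difference, using local stand-in functions written purely for this example (the real remove_nones() and remove_empties() live in sonar/utilities.py):

# Stand-in helpers, written only for this illustration.
def remove_nones(d: dict) -> dict:
    return {k: (remove_nones(v) if isinstance(v, dict) else v) for k, v in d.items() if v is not None}


def remove_empties(d: dict) -> dict:
    out = {}
    for k, v in d.items():
        if isinstance(v, dict):
            v = remove_empties(v)
        if v in ({}, [], None):
            continue  # drop empty containers and Nones
        out[k] = v
    return out


data = {"name": "proj", "tags": [], "branch": None, "settings": {"qualityGate": None}}

# migration export: remove_empty=False, remove_none=True -> empties are kept
print(remove_nones(data))                  # {'name': 'proj', 'tags': [], 'settings': {}}
# config export: remove_empty=True, remove_none=True -> empties are dropped too
print(remove_empties(remove_nones(data)))  # {'name': 'proj'}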

sonar/qualityprofiles.py

Lines changed: 11 additions & 9 deletions
@@ -381,7 +381,7 @@ def _treat_modified_rules(self, modified_rules: dict[str:str]) -> dict[str:str]:
                 diff_rules[r_key]["params"].update(parms)
         return diff_rules

-    def diff(self, another_qp: QualityProfile, qp_json_data: dict[str:str] = None) -> tuple[dict[str:str], dict[str:str]]:
+    def diff(self, another_qp: QualityProfile, qp_json_data: dict[str:str]) -> tuple[dict[str:str], dict[str:str]]:
         """Returns the list of rules added or modified in self compared to another_qp (for inheritance)
         :param another_qp: The second quality profile to diff
         :type another_qp: QualityProfile
@@ -401,8 +401,6 @@ def diff(self, another_qp: QualityProfile, qp_json_data: dict[str:str] = None) -
             diff_rules["removedRules"] = {}

         log.debug("Returning QP diff %s", str(diff_rules))
-        if qp_json_data is None:
-            return (diff_rules, qp_json_data)
         for index in ("addedRules", "modifiedRules", "removedRules"):
             if index not in diff_rules:
                 continue
@@ -549,7 +547,7 @@ def audit(endpoint: pf.Platform, audit_settings: types.ConfigSettings = None) ->
     return problems


-def hierarchize_language(qp_list: dict[str, str]) -> types.ObjectJsonRepr:
+def hierarchize_language(qp_list: dict[str, str], endpoint: pf.Platform, language: str) -> types.ObjectJsonRepr:
     """Organizes a flat list of quality profiles in inheritance hierarchy"""
     log.debug("Organizing QP list %s in hierarchy", str(qp_list.keys()))
     hierarchy = qp_list.copy()
@@ -559,17 +557,21 @@ def hierarchize_language(qp_list: dict[str, str]) -> types.ObjectJsonRepr:
         if qp_json_data["parentName"] not in hierarchy:
             log.critical("Can't find parent %s in quality profiles", qp_json_data["parentName"])
             continue
-        parent_qp = hierarchy[qp_json_data.pop("parentName")]
+        parent_qp_name = qp_json_data.pop("parentName")
+        parent_qp = hierarchy[parent_qp_name]
         if _CHILDREN_KEY not in parent_qp:
             parent_qp[_CHILDREN_KEY] = {}
-        parent_qp[_CHILDREN_KEY][qp_name] = qp_json_data
+        this_qp = get_object(endpoint=endpoint, name=qp_name, language=language)
+        (_, diff_data) = this_qp.diff(get_object(endpoint=endpoint, name=parent_qp_name, language=language), qp_json_data)
+        diff_data.pop("rules", None)
+        parent_qp[_CHILDREN_KEY][qp_name] = diff_data
         to_remove.append(qp_name)
     for qp_name in to_remove:
         hierarchy.pop(qp_name)
     return hierarchy


-def hierarchize(qp_list: types.ObjectJsonRepr) -> types.ObjectJsonRepr:
+def hierarchize(qp_list: types.ObjectJsonRepr, endpoint: pf.Platform) -> types.ObjectJsonRepr:
     """Organize a flat list of QP in hierarchical (inheritance) fashion

     :param qp_list: List of quality profiles
@@ -580,7 +582,7 @@ def hierarchize(qp_list: types.ObjectJsonRepr) -> types.ObjectJsonRepr:
     log.info("Organizing quality profiles in hierarchy")
     hierarchy = {}
     for lang, lang_qp_list in qp_list.items():
-        hierarchy[lang] = hierarchize_language(lang_qp_list)
+        hierarchy[lang] = hierarchize_language(lang_qp_list, endpoint=endpoint, language=lang)
     return hierarchy


@@ -604,7 +606,7 @@ def export(
         if lang not in qp_list:
             qp_list[lang] = {}
         qp_list[lang][name] = json_data
-    qp_list = hierarchize(qp_list)
+    qp_list = hierarchize(qp_list, endpoint=endpoint)
     if write_q:
         write_q.put(qp_list)
         write_q.put(None)
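
With this change, hierarchize_language() no longer stores a child profile's full JSON under its parent: it fetches both profiles, diffs the child against the parent, drops the flat "rules" entry, and keeps only the added/modified rules. Roughly, the exported shape goes from repeating every rule in every child to something like the sketch below. The values are invented and the rule-map shape is an assumption; the children key is _CHILDREN_KEY in the code, shown here as "children" only for readability.

# Invented example data, only to illustrate the intended output shape.
flat = {
    "Sonar way": {"rules": {"S100": "MAJOR", "S101": "MAJOR", "S110": "MAJOR"}},
    "My way": {"parentName": "Sonar way",
               "rules": {"S100": "MAJOR", "S101": "CRITICAL", "S200": "MINOR"}},
}

# After hierarchize_language(flat, endpoint, language), the child is nested under
# its parent and carries only its diff against the parent, roughly:
hierarchy = {
    "Sonar way": {
        "rules": {"S100": "MAJOR", "S101": "MAJOR", "S110": "MAJOR"},
        "children": {  # actual key is _CHILDREN_KEY
            "My way": {
                "addedRules": {"S200": "MINOR"},
                "modifiedRules": {"S101": "CRITICAL"},
            },
        },
    },
}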

sonar/users.py

Lines changed: 3 additions & 1 deletion
@@ -389,7 +389,9 @@ def to_json(self, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr

         if not self.endpoint.is_sonarcloud() and not export_settings["FULL_EXPORT"] and not json_data["local"]:
             json_data.pop("local")
-        return util.remove_nones(util.filter_export(json_data, SETTABLE_PROPERTIES, export_settings["FULL_EXPORT"]))
+        for key in "sonarQubeLastConnectionDate", "externalLogin", "externalProvider", "id", "managed":
+            json_data.pop(key, None)
+        return util.filter_export(json_data, SETTABLE_PROPERTIES, export_settings["FULL_EXPORT"])


 def search(endpoint: pf.Platform, params: types.ApiParams = None) -> dict[str, User]:
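
User.to_json() now strips the read-only account properties explicitly instead of relying on remove_nones() (which json_dump() no longer applies, see sonar/utilities.py below). The effect on an invented user payload:

# Invented user payload, just to show which fields the loop above strips.
json_data = {
    "login": "jdoe",
    "name": "Jane Doe",
    "local": True,
    "sonarQubeLastConnectionDate": "2024-05-01T10:00:00+0000",
    "externalLogin": "jdoe",
    "externalProvider": "github",
    "id": "uuid-1234",
    "managed": False,
}
for key in "sonarQubeLastConnectionDate", "externalLogin", "externalProvider", "id", "managed":
    json_data.pop(key, None)
print(json_data)  # {'login': 'jdoe', 'name': 'Jane Doe', 'local': True}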

sonar/utilities.py

Lines changed: 33 additions & 1 deletion
@@ -185,6 +185,18 @@ def remove_empties(d: dict[str, any]) -> dict[str, any]:
     return new_d


+def sort_lists(d: dict[str, any]) -> dict[str, any]:
+    """Recursively removes empty lists and dicts and none from a dict"""
+    # log.debug("Cleaning up %s", json_dump(d))
+    new_d = d.copy()
+    for k, v in d.items():
+        if isinstance(v, list) and len(v) > 0 and isinstance(v[0], (str, int, float)):
+            new_d[k] = sorted(v)
+        elif isinstance(v, dict):
+            new_d[k] = sort_lists(v)
+    return new_d
+
+
 def dict_subset(d: dict[str, str], subset_list: list[str]) -> dict[str, str]:
     """Returns the subset of dict only with subset_list keys"""
     return {key: d[key] for key in subset_list if key in d}
@@ -197,7 +209,7 @@ def allowed_values_string(original_str: str, allowed_values: list[str]) -> str:

 def json_dump(jsondata: Union[list[str], dict[str, str]], indent: int = 3) -> str:
     """JSON dump helper"""
-    return json.dumps(remove_nones(jsondata), indent=indent, sort_keys=True, separators=(",", ": "))
+    return json.dumps(jsondata, indent=indent, sort_keys=True, separators=(",", ": "))


 def csv_to_list(string: str, separator: str = ",") -> list[str]:
@@ -651,3 +663,23 @@ def list_to_dict(original_list: list[dict[str, any]], key_field: str) -> dict[st
 def dict_to_list(original_dict: dict[str, any], key_field: str, value_field: Optional[str] = "value") -> list[str, any]:
     """Converts a dict to list adding dict key in list key_field"""
     return [{key_field: key, value_field: elem} if not isinstance(elem, dict) else {key_field: key, **elem} for key, elem in original_dict.items()]
+
+
+def normalize_json_file(file: Optional[str], remove_empty: bool = True, remove_none: bool = True) -> None:
+    """Sorts a JSON file and optionally remove empty and none values"""
+    if file is None:
+        log.info("Output is stdout, skipping normalization")
+        return
+    log.info("Normalizing JSON file '%s'", file)
+    try:
+        with open_file(file, mode="r") as fd:
+            json_data = json.loads(fd.read())
+    except json.decoder.JSONDecodeError:
+        log.warning("JSON Decode error while normalizing file '%s', is file complete?", file)
+        return
+    if remove_empty:
+        json_data = remove_empties(json_data)
+    if remove_none:
+        json_data = remove_nones(json_data)
+    with open_file(file, mode="w") as fd:
+        print(json_dump(json_data), file=fd)
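
sort_lists() makes exports deterministic by sorting lists of scalars recursively (lists of dicts are left untouched), and normalize_json_file() re-reads the streamed report to sort keys and optionally drop empties and Nones in one final pass. A small usage sketch of sort_lists(); the import path is assumed from the repository layout (sonar/utilities.py):

# Usage sketch; "from sonar import utilities" is assumed from the repo layout.
from sonar import utilities

settings = {
    "tags": ["web", "backend", "api"],
    "qualityGate": {"conditions": ["coverage", "bugs"]},
    "permissions": [],  # empty list: left as-is
}
print(utilities.sort_lists(settings))
# -> {'tags': ['api', 'backend', 'web'], 'qualityGate': {'conditions': ['bugs', 'coverage']}, 'permissions': []}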
