diff --git a/sonar/aggregations.py b/sonar/aggregations.py index 1810ee741..3743c220f 100644 --- a/sonar/aggregations.py +++ b/sonar/aggregations.py @@ -31,7 +31,7 @@ import sonar.platform as pf import sonar.components as comp - +from sonar import utilities from sonar.audit.rules import get_rule from sonar.audit.problem import Problem @@ -110,5 +110,4 @@ def count(api: str, endpoint: pf.Platform, params: types.ApiParams = None) -> in if params is None: params = {} params["ps"] = 1 - data = json.loads(endpoint.get(api, params=params).text) - return data["paging"]["total"] + return utilities.nbr_total_elements(json.loads(endpoint.get(api, params=params).text)) diff --git a/sonar/applications.py b/sonar/applications.py index fbc56b52d..6fa7d1377 100644 --- a/sonar/applications.py +++ b/sonar/applications.py @@ -443,8 +443,7 @@ def count(endpoint: pf.Platform) -> int: :rtype: int """ check_supported(endpoint) - data = json.loads(endpoint.get(APIS["search"], params={"ps": 1, "filter": "qualifier = APP"}).text) - return data["paging"]["total"] + return util.nbr_total_elements(json.loads(endpoint.get(APIS["search"], params={"ps": 1, "filter": "qualifier = APP"}).text)) def check_supported(endpoint: pf.Platform) -> None: diff --git a/sonar/branches.py b/sonar/branches.py index 61ae9299f..489434ff5 100644 --- a/sonar/branches.py +++ b/sonar/branches.py @@ -29,7 +29,7 @@ from sonar.util import types import sonar.logging as log import sonar.sqobject as sq -from sonar import components, syncer, settings, exceptions +from sonar import components, settings, exceptions from sonar import projects import sonar.utilities as util @@ -220,9 +220,6 @@ def export(self, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr: :return: The branch new code period definition :rtype: str """ - from sonar.issues import count as issue_count - from sonar.hotspots import count as hotspot_count - log.debug("Exporting %s", str(self)) data = {settings.NEW_CODE_PERIOD: self.new_code()} if 
self.is_main(): @@ -234,27 +231,7 @@ def export(self, export_settings: types.ConfigSettings) -> types.ObjectJsonRepr: if export_settings.get("FULL_EXPORT", True): data.update({"name": self.name, "project": self.concerned_object.key}) if export_settings.get("MODE", "") == "MIGRATION": - data["lastAnalysis"] = util.date_to_string(self.last_analysis()) - lang_distrib = self.get_measure("ncloc_language_distribution") - loc_distrib = {} - if lang_distrib: - loc_distrib = {m.split("=")[0]: int(m.split("=")[1]) for m in lang_distrib.split(";")} - loc_distrib["total"] = self.loc() - data["ncloc"] = loc_distrib - tpissues = self.count_third_party_issues() - params = self.search_params() - data["issues"] = { - "thirdParty": tpissues if len(tpissues) > 0 else 0, - "falsePositives": issue_count(self.endpoint, issueStatuses=["FALSE_POSITIVE"], **params), - } - status = "accepted" if self.endpoint.version() >= (10, 2, 0) else "wontFix" - data["issues"][status] = issue_count(self.endpoint, issueStatuses=[status.upper()], **params) - data["hotspots"] = { - "acknowledged": hotspot_count(self.endpoint, resolution=["ACKNOWLEDGED"], **params), - "safe": hotspot_count(self.endpoint, resolution=["SAFE"], **params), - "fixed": hotspot_count(self.endpoint, resolution=["FIXED"], **params), - } - log.debug("%s has these notable issues %s", str(self), str(data["issues"])) + data.update(self.migration_export()) data = util.remove_nones(data) return None if len(data) == 0 else data @@ -330,9 +307,11 @@ def sync(self, another_branch: Branch, sync_settings: types.ConfigSettings) -> t :return: sync report as tuple, with counts of successful and unsuccessful issue syncs :rtype: tuple(report, counters) """ + from sonar.syncer import sync_lists + report, counters = [], {} log.info("Syncing %s (%s) and %s (%s) issues", str(self), self.endpoint.url, str(another_branch), another_branch.endpoint.url) - (report, counters) = syncer.sync_lists( + (report, counters) = sync_lists( 
list(self.get_issues().values()), list(another_branch.get_issues().values()), self, @@ -340,7 +319,7 @@ def sync(self, another_branch: Branch, sync_settings: types.ConfigSettings) -> t sync_settings=sync_settings, ) log.info("Syncing %s (%s) and %s (%s) hotspots", str(self), self.endpoint.url, str(another_branch), another_branch.endpoint.url) - (tmp_report, tmp_counts) = syncer.sync_lists( + (tmp_report, tmp_counts) = sync_lists( list(self.get_hotspots().values()), list(another_branch.get_hotspots().values()), self, diff --git a/sonar/components.py b/sonar/components.py index e81385163..9831d62e7 100644 --- a/sonar/components.py +++ b/sonar/components.py @@ -23,6 +23,7 @@ """ from __future__ import annotations +import math import json from datetime import datetime @@ -104,9 +105,9 @@ def get_subcomponents(self, strategy: str = "children", with_issues: bool = Fals "metricKeys": "bugs,vulnerabilities,code_smells,security_hotspots", } data = json.loads(self.get("measures/component_tree", params=parms).text) - nb_comp = data["paging"]["total"] + nb_comp = utilities.nbr_total_elements(data) log.debug("Found %d subcomponents to %s", nb_comp, str(self)) - nb_pages = (nb_comp + 500 - 1) // 500 + nb_pages = math.ceil(nb_comp / 500) comp_list = {} parms["ps"] = 500 for page in range(nb_pages): @@ -126,10 +127,10 @@ def get_subcomponents(self, strategy: str = "children", with_issues: bool = Fals def get_issues(self, filters: types.ApiParams = None) -> dict[str, object]: """Returns list of issues for a component, optionally on branches or/and PRs""" - from sonar.issues import component_filter, search_all + from sonar.issues import search_all log.info("Searching issues for %s with filters %s", str(self), str(filters)) - params = utilities.replace_keys(_ALT_COMPONENTS, component_filter(self.endpoint), self.search_params()) + params = self.search_params() if filters is not None: params.update(filters) params["additionalFields"] = "comments" @@ -137,18 +138,24 @@ def 
get_issues(self, filters: types.ApiParams = None) -> dict[str, object]: self.nbr_issues = len(issue_list) return issue_list - def count_third_party_issues(self, filters: types.ApiParams = None) -> dict[str, int]: - """Returns list of issues for a component, optionally on branches or/and PRs""" - from sonar.issues import component_filter, count_by_rule + def count_specific_rules_issues(self, ruleset: list[rules.Rule], filters: types.ApiParams = None) -> dict[str, int]: + """Returns the count of issues of a component for a given ruleset""" + from sonar.issues import count_by_rule - third_party_rules = rules.third_party(self.endpoint) - params = utilities.replace_keys(_ALT_COMPONENTS, component_filter(self.endpoint), self.search_params()) + params = self.search_params() if filters is not None: params.update(filters) params["facets"] = "rules" - params["rules"] = [r.key for r in third_party_rules] - issues_count = {k: v for k, v in count_by_rule(endpoint=self.endpoint, **params).items() if v > 0} - return issues_count + params["rules"] = [r.key for r in ruleset] + return {k: v for k, v in count_by_rule(endpoint=self.endpoint, **params).items() if v > 0} + + def count_third_party_issues(self, filters: types.ApiParams = None) -> dict[str, int]: + """Returns the count of issues of a component corresponding to 3rd party rules""" + return self.count_specific_rules_issues(ruleset=rules.third_party(self.endpoint), filters=filters) + + def count_instantiated_rules_issues(self, filters: types.ApiParams = None) -> dict[str, int]: + """Returns the count of issues of a component corresponding to instantiated rules""" + return self.count_specific_rules_issues(ruleset=rules.instantiated(self.endpoint), filters=filters) def get_hotspots(self, filters: types.ApiParams = None) -> dict[str, object]: """Returns list of hotspots for a component, optionally on branches or/and PRs""" @@ -160,6 +167,35 @@ def get_hotspots(self, filters: types.ApiParams = None) -> dict[str, object]:
params.update(filters) return search(endpoint=self.endpoint, filters=params) + def migration_export(self) -> dict[str, any]: + from sonar.issues import count as issue_count + from sonar.hotspots import count as hotspot_count + + json_data = {"lastAnalysis": utilities.date_to_string(self.last_analysis())} + lang_distrib = self.get_measure("ncloc_language_distribution") + loc_distrib = {} + if lang_distrib: + loc_distrib = {m.split("=")[0]: int(m.split("=")[1]) for m in lang_distrib.split(";")} + loc_distrib["total"] = self.loc() + json_data["ncloc"] = loc_distrib + tpissues = self.count_third_party_issues() + inst_issues = self.count_instantiated_rules_issues() + params = self.search_params() + json_data["issues"] = { + "thirdParty": tpissues if len(tpissues) > 0 else 0, + "instantiatedRules": inst_issues if len(inst_issues) > 0 else 0, + "falsePositives": issue_count(self.endpoint, issueStatuses=["FALSE_POSITIVE"], **params), + } + status = "accepted" if self.endpoint.version() >= (10, 2, 0) else "wontFix" + json_data["issues"][status] = issue_count(self.endpoint, issueStatuses=[status.upper()], **params) + json_data["hotspots"] = { + "acknowledged": hotspot_count(self.endpoint, resolution=["ACKNOWLEDGED"], **params), + "safe": hotspot_count(self.endpoint, resolution=["SAFE"], **params), + "fixed": hotspot_count(self.endpoint, resolution=["FIXED"], **params), + } + log.debug("%s has these notable issues %s", str(self), str(json_data["issues"])) + return json_data + def get_measures(self, metrics_list: types.KeyList) -> dict[str, any]: """Retrieves a project list of measures diff --git a/sonar/hotspots.py b/sonar/hotspots.py index 70c66ab8e..19c56bcce 100644 --- a/sonar/hotspots.py +++ b/sonar/hotspots.py @@ -21,6 +21,7 @@ from __future__ import annotations +import math import json import re from http import HTTPStatus @@ -71,7 +72,7 @@ SEVERITIES = () # Filters for search of hotspots are different than for issues :-( -_FILTERS_HOTSPOTS_REMAPPING = {"resolutions": 
"resolution", "statuses": "status", "componentsKey": PROJECT_FILTER_OLD, "components": PROJECT_FILTER} +_FILTERS_HOTSPOTS_REMAPPING = {"resolutions": "resolution", "statuses": "status", "componentsKey": PROJECT_FILTER, "components": PROJECT_FILTER} _OBJECTS = {} @@ -384,8 +385,7 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str, :rtype: dict{: } """ hotspots_list = {} - new_params = get_search_filters(endpoint=endpoint, params=filters) - new_params = util.dict_remap(original_dict=new_params, remapping=_FILTERS_HOTSPOTS_REMAPPING) + new_params = sanitize_search_filters(endpoint=endpoint, params=filters) filters_iterations = split_search_filters(new_params) ps = 500 if "ps" not in new_params else new_params["ps"] for inline_filters in filters_iterations: @@ -395,16 +395,15 @@ def search(endpoint: pf.Platform, filters: types.ApiParams = None) -> dict[str, while True: inline_filters["p"] = p try: - resp = endpoint.get("hotspots/search", params=inline_filters, mute=(HTTPStatus.NOT_FOUND,)) - data = json.loads(resp.text) - nbr_hotspots = data["paging"]["total"] + data = json.loads(endpoint.get("hotspots/search", params=inline_filters, mute=(HTTPStatus.NOT_FOUND,)).text) + nbr_hotspots = util.nbr_total_elements(data) except HTTPError as e: if e.response.status_code == HTTPStatus.NOT_FOUND: log.warning("No hotspots found with search params %s", str(inline_filters)) nbr_hotspots = 0 return {} raise e - nbr_pages = (nbr_hotspots + ps - 1) // ps + nbr_pages = util.nbr_pages(data) log.debug("Number of hotspots: %d - Page: %d/%d", nbr_hotspots, inline_filters["p"], nbr_pages) if nbr_hotspots > 10000: raise TooManyHotspotsError( @@ -432,7 +431,7 @@ def get_object(endpoint: pf.Platform, key: str, data: dict[str] = None, from_exp return _OBJECTS[uid] -def get_search_filters(endpoint: pf.Platform, params: types.ApiParams) -> types.ApiParams: +def sanitize_search_filters(endpoint: pf.Platform, params: types.ApiParams) -> types.ApiParams: """Returns 
the filtered list of params that are allowed for api/hotspots/search""" log.debug("Sanitizing hotspot search criteria %s", str(params)) if params is None: @@ -446,6 +445,8 @@ def get_search_filters(endpoint: pf.Platform, params: types.ApiParams) -> types. criterias["status"] = "REVIEWED" if endpoint.version() <= (10, 2, 0): criterias = util.dict_remap(original_dict=criterias, remapping={PROJECT_FILTER: PROJECT_FILTER_OLD}) + else: + criterias = util.dict_remap(original_dict=criterias, remapping={PROJECT_FILTER_OLD: PROJECT_FILTER}) criterias = util.dict_subset(criterias, SEARCH_CRITERIAS) log.debug("Sanitized hotspot search criteria %s", str(criterias)) return criterias diff --git a/sonar/issues.py b/sonar/issues.py index a208cd233..7971bd1f7 100644 --- a/sonar/issues.py +++ b/sonar/issues.py @@ -20,6 +20,7 @@ from __future__ import annotations +import math from datetime import date, datetime, timedelta import json import re @@ -750,18 +751,13 @@ def search(endpoint: pf.Platform, params: ApiParams = None, raise_error: bool = :raises: TooManyIssuesError if more than 10'000 issues found """ filters = pre_search_filters(endpoint=endpoint, params=params) - # if endpoint.version() >= (10, 2, 0): - # new_params = util.dict_remap_and_stringify(new_params, _FILTERS_10_2_REMAPPING) - - log.debug("Search filters = %s", str(filters)) - if not filters: - filters = {"ps": Issue.MAX_PAGE_SIZE} - elif "ps" not in filters: + if "ps" not in filters: filters["ps"] = Issue.MAX_PAGE_SIZE + log.debug("Search filters = %s", str(filters)) issue_list = {} data = json.loads(endpoint.get(Issue.SEARCH_API, params=filters).text) - nbr_issues = data["paging"]["total"] + nbr_issues = util.nbr_total_elements(data) nbr_pages = util.nbr_pages(data) log.debug("Number of issues: %d - Nbr pages: %d", nbr_issues, nbr_pages) @@ -822,37 +818,32 @@ def get_newest_issue(endpoint: pf.Platform, params: ApiParams = None) -> Union[d def count(endpoint: pf.Platform, **kwargs) -> int: """Returns number of 
issues of a search""" - params = {} if not kwargs else kwargs.copy() - filters = pre_search_filters(endpoint=endpoint, params=params) + filters = pre_search_filters(endpoint=endpoint, params=kwargs) filters["ps"] = 1 - nbr_issues = json.loads(endpoint.get(Issue.SEARCH_API, params=filters).text)["paging"]["total"] + nbr_issues = util.nbr_total_elements(json.loads(endpoint.get(Issue.SEARCH_API, params=filters).text)) log.debug("Count issues with filters %s returned %d issues", str(kwargs), nbr_issues) return nbr_issues def count_by_rule(endpoint: pf.Platform, **kwargs) -> dict[str, int]: """Returns number of issues of a search""" - params = {} if not kwargs else kwargs.copy() - params["ps"] = 1 - params["facets"] = "rules" - SLICE_SIZE = 50 # Search rules facets by bulks of 50 nbr_slices = 1 - if "rules" in params: - nbr_slices = (len(params["rules"]) + SLICE_SIZE - 1) // SLICE_SIZE + SLICE_SIZE = 50 # Search rules facets by bulks of 50 + if "rules" in kwargs: + ruleset = kwargs.pop("rules") + nbr_slices = math.ceil(len(ruleset) / SLICE_SIZE) + params = pre_search_filters(endpoint=endpoint, params=kwargs) + params.update({"ps": 1, "facets": "rules"}) rulecount = {} for i in range(nbr_slices): - sliced_params = params.copy() - sliced_params["rules"] = ",".join(params["rules"][i * SLICE_SIZE : min((i + 1) * SLICE_SIZE - 1, len(params["rules"]))]) - # log.debug("COUNT params = %s", str(sliced_params)) - data = json.loads(endpoint.get(Issue.SEARCH_API, params=sliced_params).text)["facets"][0]["values"] - # log.debug("COUNT data results = %s", str(data)) + params["rules"] = ",".join(ruleset[i * SLICE_SIZE : (i + 1) * SLICE_SIZE]) + data = json.loads(endpoint.get(Issue.SEARCH_API, params=params).text)["facets"][0]["values"] for d in data: - if d["val"] not in params["rules"]: + if d["val"] not in ruleset: continue if d["val"] not in rulecount: rulecount[d["val"]] = 0 rulecount[d["val"]] += d["count"] - # log.debug("Rule counts = %s",
util.json_dump(rulecount)) return rulecount @@ -870,7 +861,9 @@ def pre_search_filters(endpoint: pf.Platform, params: ApiParams) -> ApiParams: return {} log.debug("Sanitizing issue search filters %s", str(params)) version = endpoint.version() - filters = util.dict_remap(original_dict=params.copy(), remapping={"project": COMPONENT_FILTER}) + filters = util.dict_remap( + original_dict=params.copy(), remapping={"project": COMPONENT_FILTER, "application": COMPONENT_FILTER, "portfolio": COMPONENT_FILTER} + ) filters = util.dict_subset(util.remove_nones(filters), _SEARCH_CRITERIAS) if version < (10, 2, 0): # Starting from 10.2 - "componentKeys" was renamed "components" diff --git a/sonar/measures.py b/sonar/measures.py index ca390b88c..397e1356d 100644 --- a/sonar/measures.py +++ b/sonar/measures.py @@ -83,8 +83,7 @@ def count_history(self, params: ApiParams = None) -> int: if params is None: params = {} params.update({"component": self.concerned_object.key, "metrics": self.metric, "ps": 1}) - data = json.loads(self.get(Measure.API_HISTORY, params=params).text) - return data["paging"]["total"] + return util.nbr_total_elements(json.loads(self.get(Measure.API_HISTORY, params=params).text)) def search_history(self, params: ApiParams = None) -> dict[str, any]: """Searches the history of the measure diff --git a/sonar/projects.py b/sonar/projects.py index 1b77310ec..bb5a004ad 100644 --- a/sonar/projects.py +++ b/sonar/projects.py @@ -953,9 +953,6 @@ def export(self, export_settings: types.ConfigSettings, settings_list: dict[str, :return: All project configuration settings :rtype: dict """ - from sonar.issues import count as issue_count - from sonar.hotspots import count as hotspot_count - log.info("Exporting %s", str(self)) try: json_data = self._json.copy() @@ -980,15 +977,9 @@ def export(self, export_settings: types.ConfigSettings, settings_list: dict[str, json_data = util.filter_export(json_data, _IMPORTABLE_PROPERTIES, export_settings.get("FULL_EXPORT", False)) if 
export_settings.get("MODE", "") == "MIGRATION": - json_data["lastAnalysis"] = util.date_to_string(self.last_analysis()) + json_data.update(self.migration_export()) json_data["detectedCi"] = self.ci() json_data["revision"] = self.revision() - lang_distrib = self.get_measure("ncloc_language_distribution") - loc_distrib = {} - if lang_distrib: - loc_distrib = {m.split("=")[0]: int(m.split("=")[1]) for m in lang_distrib.split(";")} - loc_distrib["total"] = self.loc() - json_data["ncloc"] = loc_distrib last_task = self.last_task() json_data["backgroundTasks"] = {} if last_task: @@ -997,20 +988,6 @@ def export(self, export_settings: types.ConfigSettings, settings_list: dict[str, "lastTaskWarnings": last_task.warnings(), "taskHistory": [t._json for t in self.task_history()], } - tpissues = self.count_third_party_issues() - params = self.search_params() - json_data["issues"] = { - "thirdParty": tpissues if len(tpissues) > 0 else 0, - "falsePositives": issue_count(self.endpoint, issueStatuses=["FALSE_POSITIVE"], **params), - } - status = "accepted" if self.endpoint.version() >= (10, 2, 0) else "wontFix" - json_data["issues"][status] = issue_count(self.endpoint, issueStatuses=[status.upper()], **params) - json_data["hotspots"] = { - "acknowledged": hotspot_count(self.endpoint, resolution=["ACKNOWLEDGED"], **params), - "safe": hotspot_count(self.endpoint, resolution=["SAFE"], **params), - "fixed": hotspot_count(self.endpoint, resolution=["FIXED"], **params), - } - log.debug("%s has these notable issues %s", str(self), str(json_data["issues"])) settings_dict = settings.get_bulk(endpoint=self.endpoint, component=self, settings_list=settings_list, include_not_set=False) # json_data.update({s.to_json() for s in settings_dict.values() if include_inherited or not s.inherited}) @@ -1344,8 +1321,7 @@ def count(endpoint: pf.Platform, params: types.ApiParams = None) -> int: """ new_params = {} if params is None else params.copy() new_params.update({"ps": 1, "p": 1}) - data = 
json.loads(endpoint.get(Project.SEARCH_API, params=params).text) - return data["paging"]["total"] + return util.nbr_total_elements(json.loads(endpoint.get(Project.SEARCH_API, params=new_params).text)) def search(endpoint: pf.Platform, params: types.ApiParams = None) -> dict[str, Project]: diff --git a/sonar/qualityprofiles.py b/sonar/qualityprofiles.py index d00beeec0..37aa1c7b7 100644 --- a/sonar/qualityprofiles.py +++ b/sonar/qualityprofiles.py @@ -433,8 +433,7 @@ def projects(self) -> types.KeyList: self._projects += [p["key"] for p in data["results"]] page += 1 if self.endpoint.version() >= (10, 0, 0): - nb_pages = (data["paging"]["total"] + 500 - 1) // 500 - more = nb_pages >= page + more = util.nbr_pages(data) >= page else: more = data["more"] diff --git a/sonar/rules.py b/sonar/rules.py index 21c96ebe9..97595b5c5 100644 --- a/sonar/rules.py +++ b/sonar/rules.py @@ -384,3 +384,8 @@ def convert_for_yaml(original_json: types.ObjectJsonRepr) -> types.ObjectJsonRep def third_party(endpoint: platform.Platform) -> list[Rule]: """Returns the list of rules coming from 3rd party plugins""" return [r for r in get_list(endpoint=endpoint).values() if r.repo and r.repo not in SONAR_REPOS and not r.repo.startswith("external_")] + + +def instantiated(endpoint: platform.Platform) -> list[Rule]: + """Returns the list of rules that are instantiated""" + return [r for r in get_list(endpoint=endpoint).values() if r.template_key is not None] diff --git a/sonar/utilities.py b/sonar/utilities.py index dc1eebaac..b06854337 100644 --- a/sonar/utilities.py +++ b/sonar/utilities.py @@ -26,6 +26,7 @@ from http import HTTPStatus import sys import os +import math import contextlib import re import json @@ -355,17 +356,22 @@ def update_json(json_data: dict[str, str], categ: str, subcateg: str, value: any return json_data -def int_div_ceil(number: int, divider: int) -> int: - """Computes rounded up int division""" - return (number + divider - 1) // divider - - def nbr_pages(sonar_api_json: dict[str,
str]) -> int: """Returns nbr of pages of a paginated Sonar API call""" + if "paging" in sonar_api_json: + return math.ceil(sonar_api_json["paging"]["total"] / sonar_api_json["paging"]["pageSize"]) + elif "total" in sonar_api_json: + return math.ceil(sonar_api_json["total"] / sonar_api_json["ps"]) + else: + return 1 + + +def nbr_total_elements(sonar_api_json: dict[str, str]) -> int: + """Returns nbr of elements of a paginated Sonar API call""" if "total" in sonar_api_json: - return int_div_ceil(sonar_api_json["total"], sonar_api_json["ps"]) + return sonar_api_json["total"] elif "paging" in sonar_api_json: - return int_div_ceil(sonar_api_json["paging"]["total"], sonar_api_json["paging"]["pageSize"]) + return sonar_api_json["paging"]["total"] else: return 1 diff --git a/test/test_findings.py b/test/test_findings.py index 2a9687c00..f844b4640 100644 --- a/test/test_findings.py +++ b/test/test_findings.py @@ -455,12 +455,16 @@ def test_output_format_branch() -> None: findings_export.main() assert int(str(e.value)) == errcodes.OK br_list = utilities.csv_to_list(br) + br, pr = BRANCH_COL, PR_COL + if util.SQ.version() < (10, 2, 0): + br += 1 + pr += 1 with open(util.CSV_FILE, encoding="utf-8") as fd: reader = csv.reader(fd) next(reader) for line in reader: - assert line[BRANCH_COL] in br_list - assert line[PR_COL] == "" + assert line[br] in br_list + assert line[pr] == "" assert line[PROJECT_COL] == "okorach_sonar-tools" util.clean(util.CSV_FILE)