2 changes: 1 addition & 1 deletion docs/conf.py
@@ -114,7 +114,7 @@
 try:
     html_context # noqa: B018
 except NameError:
-    html_context = dict()
+    html_context = {}

 html_context["display_versions_lower_left"] = True
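This is the rewrite targeted by flake8-comprehensions rule C408 (unnecessary-collection-call), one of the C4 rules this PR enables. A minimal sketch of the pattern, independent of the Parsons code:

```python
# C408: dict()/list()/tuple() called without positional arguments is an
# indirect spelling of a literal; the literal avoids a name lookup and a call.
empty = {}        # preferred over dict()
point = {"x": 1}  # preferred over dict(x=1)
assert empty == dict() and point == dict(x=1)  # identical values either way
```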
6 changes: 3 additions & 3 deletions parsons/action_kit/action_kit.py
@@ -1432,7 +1432,7 @@ def bulk_upload_table(
         results = []
         for tbl in upload_tables:
             user_fields_only = int(
-                not any([h for h in tbl.columns if h != "email" and not h.startswith("user_")])
+                not any(h for h in tbl.columns if h != "email" and not h.startswith("user_"))
             )
             results.append(
                 self.bulk_upload_csv(
@@ -1442,7 +1442,7 @@ def bulk_upload_table(
                     user_fields_only=user_fields_only,
                 )
             )
-        return {"success": all([r["success"] for r in results]), "results": results}
+        return {"success": all(r["success"] for r in results), "results": results}

     def _split_tables_no_empties(self, table, no_overwrite_on_empty, set_only_columns):
         table_groups = {}
@@ -1463,7 +1463,7 @@ def _split_tables_no_empties(self, table, no_overwrite_on_empty, set_only_column
             subset_table.table = subset_table.table.cutout(*blanks)
             logger.debug(f"Column Upload Blanks: {blanks}")
             logger.debug(f"Column Upload Columns: {subset_table.columns}")
-            if not set(["user_id", "email"]).intersection(subset_table.columns):
+            if not {"user_id", "email"}.intersection(subset_table.columns):
                 logger.warning(
                     f"Upload will fail without user_id or email. "
                     f"Rows: {subset_table.num_rows}, Columns: {subset_table.columns}"
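The `any([...])` → `any(...)` changes here match rule C419 (unnecessary list comprehension inside `any()`/`all()`). A sketch of why the generator form is preferable, using hypothetical column names rather than real upload data:

```python
# C419: a list comprehension inside any()/all() materializes every element
# before the check runs; a generator expression lets any() short-circuit.
columns = ["email", "user_firstname", "zip"]  # hypothetical upload header

# Builds the full filtered list, then scans it:
extra = any([h for h in columns if h != "email" and not h.startswith("user_")])
# Stops at the first match ("zip") without finishing the loop:
extra = any(h for h in columns if h != "email" and not h.startswith("user_"))
assert extra  # "zip" is neither "email" nor a user_ field
```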
2 changes: 1 addition & 1 deletion parsons/airtable/airtable.py
@@ -303,7 +303,7 @@ def delete_records(self, table):
         # is provided then map the ids into the expected list of id strings.

         if any(isinstance(row, dict) for row in table):
-            table = list(map(lambda row: row["id"], table))
+            table = [row["id"] for row in table]

         resp = self.client.batch_delete(table)
         logger.info(f"{len(table)} records deleted.")
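This one is rule C417 (unnecessary `map` with a `lambda`): a comprehension expresses the same transform without a per-element function call. A small illustration with fabricated Airtable-style records:

```python
# C417: map() + lambda is clearer (and usually faster) as a comprehension.
records = [{"id": "recA1", "deleted": True}, {"id": "recB2", "deleted": True}]

ids = list(map(lambda row: row["id"], records))  # flagged by C417
ids = [row["id"] for row in records]             # preferred
assert ids == ["recA1", "recB2"]
```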
2 changes: 1 addition & 1 deletion parsons/aws/s3.py
@@ -155,7 +155,7 @@ def list_keys(
             'Size', and 'Owner'.
         """

-        keys_dict = dict()
+        keys_dict = {}
         logger.debug(f"Fetching keys in {bucket} bucket")

         continuation_token = None
2 changes: 1 addition & 1 deletion parsons/azure/azure_blob_storage.py
@@ -157,7 +157,7 @@ def list_blobs(self, container_name, name_starts_with=None):
         """

         container_client = self.get_container(container_name)
-        blobs = [blob for blob in container_client.list_blobs(name_starts_with=name_starts_with)]
+        blobs = list(container_client.list_blobs(name_starts_with=name_starts_with))
         logger.info(f"Found {len(blobs)} blobs in {container_name} container.")
         return blobs
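Here the fix is rule C416 (unnecessary comprehension): a comprehension that yields each element unchanged is just a copy, which `list()` states directly. The same rule drives several later changes in this diff (`table.data`, `r["output"]`, `payload.items()`). A sketch with a stand-in for the paged `list_blobs()` iterator:

```python
# C416: [x for x in iterable] is a verbose list(iterable).
blob_pager = (name for name in ["a.csv", "b.csv", "c.csv"])  # stand-in iterator

names = list(blob_pager)  # preferred over [b for b in blob_pager]
assert names == ["a.csv", "b.csv", "c.csv"]
# list() makes the intent -- materialize the iterator into a plain list --
# obvious at a glance, with no comprehension machinery to read past.
```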
10 changes: 3 additions & 7 deletions parsons/braintree/braintree.py
@@ -235,7 +235,7 @@ def get_disputes(self, start_date=None, end_date=None, query_list=None, query_di
             query_list=query_list,
             query_dict=query_dict,
             default_query=(
-                {"effective_date": dict(between=[start_date, end_date])}
+                {"effective_date": {"between": [start_date, end_date]}}
                 if start_date and end_date
                 else None
             ),
@@ -310,7 +310,7 @@ def get_subscriptions(
             query_list=query_list,
             query_dict=query_dict,
             default_query=(
-                {"created_at": dict(between=[start_date, end_date])}
+                {"created_at": {"between": [start_date, end_date]}}
                 if start_date and end_date
                 else None
             ),
@@ -389,11 +389,7 @@ def get_transactions(
             query_list=query_list,
             query_dict=query_dict,
             default_query=(
-                {
-                    "disbursement_date": dict(
-                        between=[disbursement_start_date, disbursement_end_date]
-                    )
-                }
+                {"disbursement_date": {"between": [disbursement_start_date, disbursement_end_date]}}
                 if disbursement_start_date and disbursement_end_date
                 else None
             ),
4 changes: 2 additions & 2 deletions parsons/capitol_canary/capitol_canary.py
@@ -294,7 +294,7 @@ def create_advocate(

         # Turn into a list of items so we can append multiple campaigns
         campaign_keys = [("campaigns[]", val) for val in campaigns]
-        data = [(key, value) for key, value in payload.items()] + campaign_keys
+        data = list(payload.items()) + campaign_keys

         # Call into the CapitolCanary API
         response = self.client.post_request("advocates", data=data)
@@ -387,7 +387,7 @@ def update_advocate(
         # Turn into a list of items so we can append multiple campaigns
         campaigns = campaigns or []
         campaign_keys = [("campaigns[]", val) for val in campaigns]
-        data = [(key, value) for key, value in payload.items()] + campaign_keys
+        data = list(payload.items()) + campaign_keys

         # Call into the CapitolCanary API
         self.client.post_request("advocates", data=data)
2 changes: 1 addition & 1 deletion parsons/etl/table.py
@@ -103,7 +103,7 @@ def __getitem__(self, index):

         elif isinstance(index, slice):
             tblslice = petl.rowslice(self.table, index.start, index.stop, index.step)
-            return [row for row in tblslice]
+            return list(tblslice)

         else:
             raise TypeError("You must pass a string or an index as a value.")
2 changes: 1 addition & 1 deletion parsons/facebook_ads/facebook_ads.py
@@ -200,7 +200,7 @@ def get_match_table_for_users_table(users_table):
     @staticmethod
     def _get_match_schema_and_data(table):
         # Grab the raw data as a list of tuples
-        data_list = [row for row in table.data]
+        data_list = list(table.data)
         return (table.columns, data_list)

     @staticmethod
2 changes: 1 addition & 1 deletion parsons/google/google_cloud_storage.py
@@ -202,7 +202,7 @@ def list_blobs(
         )

         if include_file_details:
-            lst = [b for b in blobs]
+            lst = list(blobs)
         else:
             lst = [b.name for b in blobs]
4 changes: 2 additions & 2 deletions parsons/ngpvan/email.py
@@ -127,7 +127,7 @@ def get_email_stats(self, aggregate_ab: bool = True) -> Table:
         if aggregate_ab:
             for email in email_list: # One row per foreignMessageId
                 outer = {field: email[field] for field in outer_fields}
-                inner = {field: 0 for field in inner_fields}
+                inner = dict.fromkeys(inner_fields, 0)
                 for i in email["emailMessageContent"]:
                     # Pending emails don't have emailMessageContentDistributions, just have defaults
                     if not i["emailMessageContentDistributions"]:
@@ -149,7 +149,7 @@ def get_email_stats(self, aggregate_ab: bool = True) -> Table:
                 for i in email["emailMessageContent"]:
                     # One row per foreignMessageId / emailMessageContent entry
                     outer = {field: email[field] for field in outer_fields}
-                    inner = {field: 0 for field in inner_fields}
+                    inner = dict.fromkeys(inner_fields, 0)
                     if not i["emailMessageContentDistributions"]:
                         logger.info(
                             f"No emailMessageContentDistributions for email {i['name']}, defaulting values to 0"
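Replacing `{field: 0 for field in inner_fields}` with `dict.fromkeys(inner_fields, 0)` appears to be rule C420 (dict comprehension with a constant value). One caveat worth noting, sketched below with illustrative field names: `fromkeys` reuses a single value object, so it is only safe with immutable defaults like `0`:

```python
# C420: a dict comprehension whose value never varies can be dict.fromkeys().
inner_fields = ["opens", "clicks", "unsubscribes"]  # illustrative field names

counters = dict.fromkeys(inner_fields, 0)
assert counters == {"opens": 0, "clicks": 0, "unsubscribes": 0}

# Caveat: every key shares one value object. Fine for ints/None, a trap for
# mutable defaults -- both entries below alias the *same* list:
shared = dict.fromkeys(["a", "b"], [])
shared["a"].append(1)
assert shared["b"] == [1]
```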
2 changes: 1 addition & 1 deletion parsons/scytl/scytl.py
@@ -91,7 +91,7 @@ def __init__(self, state: str, election_id: str, county=""):
         self.previous_details_version_num = None
         self.previous_county_details_version_num = None
         self.previous_county_details_list = None
-        self.previously_fetched_counties = set([])
+        self.previously_fetched_counties = set()

     def _parse_date_to_utc(self, input_dt: str) -> datetime:
         """
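`set([])` → `set()` falls under rule C405 (unnecessary list literal inside a `set()` call); the same rule drives the `set(["user_id", "email"])` → `{"user_id", "email"}` change earlier in this diff. A minimal sketch:

```python
# C405: set([...]) builds a throwaway list and then converts it.
required = {"user_id", "email"}  # preferred over set(["user_id", "email"])
seen = set()                     # empty set; note {} would create a dict
assert required == set(["user_id", "email"]) and seen == set([])
```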
2 changes: 1 addition & 1 deletion parsons/sftp/utilities.py
@@ -4,7 +4,7 @@


 def connection_exists(args, kwargs):
-    if any([isinstance(arg, paramiko.sftp_client.SFTPClient) for arg in args]):
+    if any(isinstance(arg, paramiko.sftp_client.SFTPClient) for arg in args):
         return True
     if "connection" in kwargs and kwargs["connection"]:
         return True
2 changes: 1 addition & 1 deletion parsons/targetsmart/targetsmart_api.py
@@ -200,7 +200,7 @@ def radius_search(
         }

         r = self.connection.request(url, args=args, raw=True)
-        return Table([itm for itm in r["output"]]).unpack_dict("data_fields", prepend=False)
+        return Table(list(r["output"])).unpack_dict("data_fields", prepend=False)

     def phone(self, table):
         """
4 changes: 2 additions & 2 deletions parsons/utilities/dbt/logging.py
@@ -120,7 +120,7 @@ def format_command_result(

         # Skips
         if manifest.skips:
-            skips = set([i.node.name for i in manifest.skips])
+            skips = {i.node.name for i in manifest.skips}
             log_message += "\nSkipped:\n```{}```".format(", ".join(skips))

         return log_message
@@ -130,7 +130,7 @@ def format_result(self) -> str:
         full_log_message = ""

         # Header
-        if any([command.errors for command in self.commands]):
+        if any(command.errors for command in self.commands):
             status = "failed"
             full_log_message += "\U0001f534"
         else:
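The `skips` change is rule C403 (unnecessary list comprehension inside a `set()` call): a set comprehension builds the set directly instead of via an intermediate list. Sketched with placeholder node names:

```python
# C403: set([...comprehension...]) -> {...comprehension...}
node_names = ["model_a", "model_b", "model_a"]  # hypothetical dbt node names

skips = set([n.upper() for n in node_names])  # flagged by C403
skips = {n.upper() for n in node_names}       # preferred; deduplicates the same way
assert skips == {"MODEL_A", "MODEL_B"}
```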
2 changes: 1 addition & 1 deletion parsons/utilities/dbt/models.py
@@ -28,7 +28,7 @@ def filter_results(self, **kwargs) -> list[NodeResult]:
         filtered_results = [
             result
             for result in self.dbt_manifest
-            if all([str(getattr(result, key)) == value for key, value in kwargs.items()])
+            if all(str(getattr(result, key)) == value for key, value in kwargs.items())
         ]
         return filtered_results
1 change: 1 addition & 0 deletions pyproject.toml
@@ -48,6 +48,7 @@ select = [
     "B", # flake8-bugbear (B)
     "E", # pycodestyle errors (E)
     "W", # pycodestyle warnings (W)
+    "C4", # flake8-comprehensions (C4)
     "F", # Pyflakes (F)
     "I", # isort (I)
     "TID", # flake8-tidy-imports (TID)
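Selecting `"C4"` turns on the whole flake8-comprehensions family, which is what produced every rewrite in this diff. Assuming the ruff CLI is installed and on PATH, a sketch of reproducing the same fixes locally (most C4 rules are auto-fixable):

```python
# Run only the comprehension rules this PR enables, applying safe auto-fixes.
import subprocess

subprocess.run(
    ["ruff", "check", "--select", "C4", "--fix", "."],
    check=False,  # a non-zero exit just means ruff found (or fixed) violations
)
```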
8 changes: 4 additions & 4 deletions test/test_action_kit.py
@@ -485,12 +485,12 @@ def test_paginated_get(self):
         first_mock.status_code = 201
         first_mock.json = lambda: {
             "meta": {"next": "/rest/v1/user/abc"},
-            "objects": list(map(lambda x: {"value": x}, [*range(100)])),
+            "objects": [{"value": x} for x in [*range(100)]],
         }
         second_mock.status_code = 201
         second_mock.json = lambda: {
             "meta": {"next": "/rest/v1/user/def"},
-            "objects": list(map(lambda x: {"value": x}, [*range(100, 200)])),
+            "objects": [{"value": x} for x in [*range(100, 200)]],
         }
         resp_mock.get.side_effect = [first_mock, second_mock]
         self.actionkit.conn = resp_mock
@@ -513,12 +513,12 @@ def test_paginated_get_custom_limit(self):
         first_mock.status_code = 201
         first_mock.json = lambda: {
             "meta": {"next": "/rest/v1/user/abc"},
-            "objects": list(map(lambda x: {"value": x}, [*range(100)])),
+            "objects": [{"value": x} for x in [*range(100)]],
         }
         second_mock.status_code = 201
         second_mock.json = lambda: {
             "meta": {"next": "/rest/v1/user/def"},
-            "objects": list(map(lambda x: {"value": x}, [*range(100, 200)])),
+            "objects": [{"value": x} for x in [*range(100, 200)]],
         }
         resp_mock.get.side_effect = [first_mock, second_mock]
         self.actionkit.conn = resp_mock
2 changes: 1 addition & 1 deletion test/test_airtable/test_airtable.py
@@ -220,4 +220,4 @@ def test_delete_records(self, m):
         resp = self.at.delete_records(tbl)

         self.assertEqual(len(delete_responses["records"]), len(resp))
-        self.assertTrue(all([r["deleted"] for r in resp]))
+        self.assertTrue(all(r["deleted"] for r in resp))
8 changes: 4 additions & 4 deletions test/test_catalist/test_catalist.py
@@ -94,7 +94,7 @@ def test_upload(self, mock_requests) -> None:
         requested_base_url = mock_requests._adapter.request_history[1]._url_parts.netloc

         assert requested_base_url == "api.catalist.us"
-        assert set(requested_queries.keys()) == set(["token"])
+        assert set(requested_queries.keys()) == {"token"}
         assert requested_queries["token"] == ["tokenexample"]
         assert requested_endpoint.startswith("/mapi/upload/template/48827/action/publish/url/")

@@ -112,7 +112,7 @@ def test_upload_with_options(self, mock_requests) -> None:

         requested_queries = mock_requests._adapter.request_history[1].qs

-        assert set(requested_queries.keys()) == set(["token", "copytosandbox", "phone"])
+        assert set(requested_queries.keys()) == {"token", "copytosandbox", "phone"}
         assert requested_queries["copytosandbox"] == ["true"]
         assert requested_queries["phone"] == ["123456789"]

@@ -128,7 +128,7 @@ def test_status(self, mock_requests) -> None:
         requested_base_url = mock_requests._adapter.request_history[1]._url_parts.netloc

         assert requested_base_url == "api.catalist.us"
-        assert set(requested_queries.keys()) == set(["token"])
+        assert set(requested_queries.keys()) == {"token"}
         assert requested_queries["token"] == ["tokenexample"]
         assert requested_endpoint == "/mapi/status/id/12345"

@@ -147,7 +147,7 @@ def test_load_matches(self) -> None:
         first_called_method = str(first_mocked_call).split("(")[0].split(".")[1]

         assert first_called_method == "list_directory"
-        assert set(first_mocked_call.args) == set(["/myDownloads/"])
+        assert set(first_mocked_call.args) == {"/myDownloads/"}

         second_mocked_call = match.sftp.mock_calls[1]
         second_called_method = str(second_mocked_call).split("(")[0].split(".")[1]
2 changes: 1 addition & 1 deletion test/test_databases/fakes.py
@@ -77,7 +77,7 @@ def distinct_primary_key(self, primary_key):
         if primary_key not in self.data.columns:
             return True

-        pk_values = [val for val in self.data[primary_key]]
+        pk_values = list(self.data[primary_key])
         pk_set = set(pk_values)
         return len(pk_set) == len(pk_values)
4 changes: 2 additions & 2 deletions test/test_databases/test_bigquery.py
@@ -82,7 +82,7 @@ def test_query__no_results(self):
         # Because Table() == Table() fails for some reason
         assert isinstance(result, Table)
         assert not len(result)
-        assert tuple(result.columns) == tuple([])
+        assert tuple(result.columns) == ()

     @mock.patch("parsons.utilities.files.create_temp_file")
     def test_query__no_return(self, create_temp_file_mock):
@@ -119,7 +119,7 @@ def test_query_with_transaction(self, create_temp_file_mock):

         # Check that queries and transaction keywords are included in sql
         self.assertTrue(
-            all([text in keyword_args["sql"] for text in queries + ["BEGIN TRANSACTION", "COMMIT"]])
+            all(text in keyword_args["sql"] for text in queries + ["BEGIN TRANSACTION", "COMMIT"])
         )
         self.assertEqual(keyword_args["parameters"], parameters)
         self.assertFalse(keyword_args["return_values"])
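`tuple([])` → `()` looks like rule C409 (unnecessary list literal passed to `tuple()`); the tuple literal is the direct spelling and skips building a throwaway list. A one-liner sketch:

```python
# C409: tuple([...]) copies a list into a tuple; the literal is direct.
assert tuple([]) == ()          # empty case, as in the test above
assert tuple([1, 2]) == (1, 2)  # general case C409 rewrites
```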
4 changes: 2 additions & 2 deletions test/test_sftp.py
@@ -210,15 +210,15 @@ def test_table_to_sftp_csv(live_sftp, simple_table, compression): # noqa F811
 def assert_results_match_expected(expected, results):
     assert len(results) == len(expected)
     for e in expected:
-        assert any([e in r for r in results])
+        assert any(e in r for r in results)


 def assert_has_call(mock, args):
     return call(*args) in mock.mock_calls


 def assert_has_calls(mock, calls):
-    return all([assert_has_call(mock, c) for c in calls])
+    return all(assert_has_call(mock, c) for c in calls)


 @mark_live_test
2 changes: 1 addition & 1 deletion useful_resources/sample_code/apply_activist_code.py
@@ -44,7 +44,7 @@
 myv_keys = {k: VAN(api_key=v, db=os.environ["VAN_DB_NAME"]) for k, v in myv_states.items()}

 # Create simple set of states for insertion into SQL
-states = "','".join([s for s in myv_keys])
+states = "','".join(list(myv_keys))

 # SQL to pull those needing Activist Code
 sql = f"""