From dd105cb5dd3f16d0b7ef9fb9079dd302620f7b06 Mon Sep 17 00:00:00 2001 From: Wil T Date: Sat, 12 Jul 2025 11:31:40 -0400 Subject: [PATCH 1/2] add Pyupgrade rules --- docs/conf.py | 1 - parsons/actblue/actblue.py | 2 +- parsons/action_builder/action_builder.py | 2 +- parsons/action_kit/action_kit.py | 2 +- parsons/action_network/action_network.py | 6 +- parsons/airmeet/airmeet.py | 2 +- parsons/airtable/airtable.py | 2 +- parsons/alchemer/alchemer.py | 2 +- parsons/auth0/auth0.py | 2 +- parsons/aws/lambda_distribute.py | 2 +- parsons/aws/s3.py | 4 +- parsons/azure/azure_blob_storage.py | 4 +- parsons/bill_com/bill_com.py | 12 +-- parsons/bloomerang/bloomerang.py | 2 +- parsons/box/box.py | 2 +- parsons/braintree/braintree.py | 2 +- parsons/capitol_canary/capitol_canary.py | 2 +- parsons/catalist/catalist.py | 16 ++-- parsons/census/census.py | 2 +- parsons/civis/civisclient.py | 2 +- parsons/community/community.py | 2 +- parsons/controlshift/controlshift.py | 2 +- parsons/copper/copper.py | 2 +- parsons/crowdtangle/crowdtangle.py | 2 +- parsons/databases/discover_database.py | 12 +-- parsons/databases/postgres/postgres.py | 2 +- parsons/databases/redshift/redshift.py | 6 +- parsons/databases/redshift/rs_copy_table.py | 10 +- parsons/databases/redshift/rs_create_table.py | 12 +-- parsons/databases/redshift/rs_schema.py | 2 +- .../databases/redshift/rs_table_utilities.py | 10 +- parsons/donorbox/donorbox.py | 2 +- parsons/empower/empower.py | 2 +- parsons/etl/etl.py | 2 +- parsons/etl/tofrom.py | 2 +- parsons/facebook_ads/facebook_ads.py | 4 +- parsons/formstack/formstack.py | 2 +- parsons/geocode/census_geocoder.py | 2 +- parsons/github/github.py | 2 +- parsons/google/google_admin.py | 2 +- parsons/google/google_bigquery.py | 12 +-- parsons/google/google_civic.py | 2 +- parsons/google/google_cloud_storage.py | 2 +- parsons/google/utilities.py | 10 +- parsons/hustle/hustle.py | 38 ++++---- parsons/mobilize_america/ma.py | 2 +- parsons/nation_builder/nation_builder.py 
| 20 ++-- parsons/newmode/newmode.py | 94 +++++++++---------- parsons/ngpvan/activist_codes.py | 2 +- parsons/ngpvan/bulk_import.py | 2 +- parsons/ngpvan/canvass_responses.py | 2 +- parsons/ngpvan/changed_entities.py | 2 +- parsons/ngpvan/codes.py | 2 +- parsons/ngpvan/contact_notes.py | 2 +- parsons/ngpvan/email.py | 2 +- parsons/ngpvan/events.py | 2 +- parsons/ngpvan/introspection.py | 2 +- parsons/ngpvan/locations.py | 2 +- parsons/ngpvan/people.py | 10 +- parsons/ngpvan/printed_lists.py | 2 +- parsons/ngpvan/saved_lists.py | 6 +- parsons/ngpvan/scores.py | 4 +- parsons/ngpvan/signups.py | 2 +- parsons/ngpvan/supporter_groups.py | 2 +- parsons/ngpvan/survey_questions.py | 2 +- parsons/ngpvan/targets.py | 2 +- parsons/ngpvan/van_connector.py | 2 +- parsons/notifications/slack.py | 2 +- parsons/phone2action/p2a.py | 2 +- parsons/quickbase/quickbase.py | 2 +- parsons/redash/redash.py | 6 +- parsons/scytl/scytl.py | 28 +++--- parsons/sftp/sftp.py | 6 +- parsons/shopify/shopify.py | 15 +-- parsons/sisense/sisense.py | 2 +- parsons/targetsmart/targetsmart_api.py | 4 +- parsons/targetsmart/targetsmart_automation.py | 2 +- parsons/tools/credential_tools.py | 2 +- parsons/turbovote/turbovote.py | 2 +- parsons/utilities/api_connector.py | 2 +- parsons/utilities/dbt/dbt.py | 12 +-- parsons/utilities/dbt/logging.py | 6 +- parsons/utilities/oauth_api_connector.py | 6 +- parsons/zoom/zoom.py | 10 +- pyproject.toml | 1 + .../test_action_builder.py | 4 +- test/test_aws_async.py | 2 +- test/test_azure/test_azure_blob_storage.py | 2 +- test/test_catalist/conftest.py | 2 +- test/test_copper/test_copper.py | 4 +- test/test_credential_tools.py | 2 +- test/test_databases/test_bigquery.py | 28 ++---- test/test_databases/test_dbsync.py | 8 +- test/test_etl.py | 6 +- test/test_github/test_github.py | 12 +-- test/test_gmail/test_gmail.py | 12 +-- test/test_google/test_google_cloud_storage.py | 2 +- test/test_google/test_utilities.py | 10 +- test/test_rockthevote/test_rtv.py | 2 +- 
test/test_scytl/test_scytl.py | 10 +- test/test_shopify.py | 2 +- test/test_slack/test_slack.py | 12 +-- test/test_smtp.py | 2 +- .../test_targetsmart_automation.py | 4 +- test/test_turbovote/test_turbovote.py | 2 +- test/test_utilities.py | 4 +- test/test_van/test_events.py | 2 +- test/test_van/test_people.py | 4 +- test/test_van/test_scores.py | 4 +- 109 files changed, 309 insertions(+), 328 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index b764e4aba4..66454ce6aa 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # # Configuration file for the Sphinx documentation builder. # diff --git a/parsons/actblue/actblue.py b/parsons/actblue/actblue.py index 30c7cb34fd..d5690bfc39 100644 --- a/parsons/actblue/actblue.py +++ b/parsons/actblue/actblue.py @@ -11,7 +11,7 @@ ACTBLUE_API_ENDPOINT = "https://secure.actblue.com/api/v1" -class ActBlue(object): +class ActBlue: """ Instantiate class. diff --git a/parsons/action_builder/action_builder.py b/parsons/action_builder/action_builder.py index 4d25ab045d..8e2ca92711 100644 --- a/parsons/action_builder/action_builder.py +++ b/parsons/action_builder/action_builder.py @@ -10,7 +10,7 @@ API_URL = "https://{subdomain}.actionbuilder.org/api/rest/v1" -class ActionBuilder(object): +class ActionBuilder: """ `Args:` api_token: str diff --git a/parsons/action_kit/action_kit.py b/parsons/action_kit/action_kit.py index d1e03dfb70..7e11724632 100644 --- a/parsons/action_kit/action_kit.py +++ b/parsons/action_kit/action_kit.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) -class ActionKit(object): +class ActionKit: """ Instantiate the ActionKit class diff --git a/parsons/action_network/action_network.py b/parsons/action_network/action_network.py index dacb36a7d5..1efda2eb75 100644 --- a/parsons/action_network/action_network.py +++ b/parsons/action_network/action_network.py @@ -2,7 +2,7 @@ import logging import re import warnings -from typing import Dict, List, Literal, Union +from typing 
import Literal, Union from parsons import Table from parsons.utilities import check_env @@ -13,7 +13,7 @@ API_URL = "https://actionnetwork.org/api/v2" -class ActionNetwork(object): +class ActionNetwork: """ `Args:` api_token: str @@ -1155,7 +1155,7 @@ def get_person(self, person_id): def upsert_person( self, - email_address: Union[str, List[str], List[Dict[str, str]]] = None, + email_address: Union[str, list[str], list[dict[str, str]]] = None, given_name=None, family_name=None, tags=None, diff --git a/parsons/airmeet/airmeet.py b/parsons/airmeet/airmeet.py index 9d0838067d..5ed003e6d7 100644 --- a/parsons/airmeet/airmeet.py +++ b/parsons/airmeet/airmeet.py @@ -5,7 +5,7 @@ AIRMEET_DEFAULT_URI = "https://api-gateway.airmeet.com/prod/" -class Airmeet(object): +class Airmeet: """ Instantiate class. diff --git a/parsons/airtable/airtable.py b/parsons/airtable/airtable.py index ac349091dc..376e3e826f 100644 --- a/parsons/airtable/airtable.py +++ b/parsons/airtable/airtable.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -class Airtable(object): +class Airtable: """ `Args:` base_key: str diff --git a/parsons/alchemer/alchemer.py b/parsons/alchemer/alchemer.py index 8b6b6e9356..a0d49343d8 100644 --- a/parsons/alchemer/alchemer.py +++ b/parsons/alchemer/alchemer.py @@ -23,7 +23,7 @@ def sg_compatibility(): os.environ["ALCHEMER_API_VERSION"] = os.getenv("SURVEYGIZMO_API_VERSION") -class Alchemer(object): +class Alchemer: """ Instantiate Alchemer Class diff --git a/parsons/auth0/auth0.py b/parsons/auth0/auth0.py index 00f8dbf87c..a7cd4035fb 100644 --- a/parsons/auth0/auth0.py +++ b/parsons/auth0/auth0.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) -class Auth0(object): +class Auth0: """ Instantiate the Auth0 class diff --git a/parsons/aws/lambda_distribute.py b/parsons/aws/lambda_distribute.py index fedb7c850d..c4acbf7a69 100644 --- a/parsons/aws/lambda_distribute.py +++ b/parsons/aws/lambda_distribute.py @@ -54,7 +54,7 @@ def get_range(self, bucket, key, 
rangestart, rangeend): # so e.g. while python returns 2 bytes for data[2:4] # Range: bytes=2-4 will return 3!! So we subtract 1 response = self.s3.client.get_object( - Bucket=bucket, Key=key, Range="bytes={}-{}".format(rangestart, rangeend - 1) + Bucket=bucket, Key=key, Range=f"bytes={rangestart}-{rangeend - 1}" ) return response["Body"].read() diff --git a/parsons/aws/s3.py b/parsons/aws/s3.py index abba5fe8fb..e0dbe78759 100644 --- a/parsons/aws/s3.py +++ b/parsons/aws/s3.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -class AWSConnection(object): +class AWSConnection: def __init__( self, aws_access_key_id=None, @@ -43,7 +43,7 @@ def __init__( self.session = boto3.Session() -class S3(object): +class S3: """ Instantiate the S3 class. diff --git a/parsons/azure/azure_blob_storage.py b/parsons/azure/azure_blob_storage.py index 7bbefb7e37..b9600bbffd 100644 --- a/parsons/azure/azure_blob_storage.py +++ b/parsons/azure/azure_blob_storage.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -class AzureBlobStorage(object): +class AzureBlobStorage: """ Instantiate AzureBlobStorage Class for a given Azure storage account. 
@@ -261,7 +261,7 @@ def _get_content_settings_from_dict(self, kwargs_dict): kwargs_dict: dict A dict which should be processed and may have keys for ``ContentSettings`` `Returns:` - Tuple[Optional[ContentSettings], dict] + tuple[Optional[ContentSettings], dict] Any created settings or ``None`` and the dict with settings keys remvoed """ diff --git a/parsons/bill_com/bill_com.py b/parsons/bill_com/bill_com.py index 70455e2485..8dffae45a5 100644 --- a/parsons/bill_com/bill_com.py +++ b/parsons/bill_com/bill_com.py @@ -5,7 +5,7 @@ from parsons import Table -class BillCom(object): +class BillCom: """ `Args:` user_name: str @@ -28,7 +28,7 @@ def __init__(self, user_name, password, org_id, dev_key, api_url): "orgId": org_id, "devKey": dev_key, } - response = requests.post(url="%sLogin.json" % api_url, data=params, headers=self.headers) + response = requests.post(url=f"{api_url}Login.json", data=params, headers=self.headers) self.dev_key = dev_key self.api_url = api_url self.session_id = response.json()["response_data"]["sessionId"] @@ -69,14 +69,14 @@ def _post_request(self, data, action, object_name): """ if action == "Read": - url = "%sCrud/%s/%s.json" % (self.api_url, action, object_name) + url = f"{self.api_url}Crud/{action}/{object_name}.json" elif action == "Create": data["obj"]["entity"] = object_name - url = "%sCrud/%s/%s.json" % (self.api_url, action, object_name) + url = f"{self.api_url}Crud/{action}/{object_name}.json" elif action == "Send": - url = "%s%s%s.json" % (self.api_url, action, object_name) + url = f"{self.api_url}{action}{object_name}.json" else: - url = "%s%s/%s.json" % (self.api_url, action, object_name) + url = f"{self.api_url}{action}/{object_name}.json" payload = self._get_payload(data) response = requests.post(url=url, data=payload, headers=self.headers) return response.json() diff --git a/parsons/bloomerang/bloomerang.py b/parsons/bloomerang/bloomerang.py index 70c1041e44..47a072283b 100644 --- a/parsons/bloomerang/bloomerang.py +++ 
b/parsons/bloomerang/bloomerang.py @@ -13,7 +13,7 @@ URI_AUTH = "https://crm.bloomerang.co/authorize/" -class Bloomerang(object): +class Bloomerang: """ Instantiate Bloomerang class diff --git a/parsons/box/box.py b/parsons/box/box.py index 946a4e7bde..ecd66fe305 100644 --- a/parsons/box/box.py +++ b/parsons/box/box.py @@ -31,7 +31,7 @@ DEFAULT_FOLDER_ID = "0" -class Box(object): +class Box: """Box is a file storage provider. `Args:` diff --git a/parsons/braintree/braintree.py b/parsons/braintree/braintree.py index 81a8d9bc5a..3ca0f0c142 100644 --- a/parsons/braintree/braintree.py +++ b/parsons/braintree/braintree.py @@ -12,7 +12,7 @@ class ParsonsBraintreeError(Exception): pass -class Braintree(object): +class Braintree: """ Braintree is a payment processor. `Args:` diff --git a/parsons/capitol_canary/capitol_canary.py b/parsons/capitol_canary/capitol_canary.py index 4c92ecb25f..2f1154f2c6 100644 --- a/parsons/capitol_canary/capitol_canary.py +++ b/parsons/capitol_canary/capitol_canary.py @@ -12,7 +12,7 @@ CAPITOL_CANARY_URI = "https://api.phone2action.com/2.0/" -class CapitolCanary(object): +class CapitolCanary: """ Instantiate CapitolCanary Class diff --git a/parsons/catalist/catalist.py b/parsons/catalist/catalist.py index d3fc981a59..c2802e57b8 100644 --- a/parsons/catalist/catalist.py +++ b/parsons/catalist/catalist.py @@ -9,7 +9,7 @@ import tempfile import time import urllib -from typing import Dict, List, Optional, Union +from typing import Optional, Union from zipfile import ZipFile from parsons.etl import Table @@ -118,7 +118,7 @@ def match( export_filename_suffix: Optional[str] = None, input_subfolder: Optional[str] = None, copy_to_sandbox: bool = False, - static_values: Optional[Dict[str, Union[str, int]]] = None, + static_values: Optional[dict[str, Union[str, int]]] = None, wait: int = 30, ) -> Table: """Load table to the Catalist Match API, returns matched table. 
@@ -169,7 +169,7 @@ def upload( export_filename_suffix: Optional[str] = None, input_subfolder: Optional[str] = None, copy_to_sandbox: bool = False, - static_values: Optional[Dict[str, Union[str, int]]] = None, + static_values: Optional[dict[str, Union[str, int]]] = None, ) -> dict: """Load table to the Catalist Match API, returns response with job metadata. @@ -225,7 +225,7 @@ def upload( endpoint = "/".join(endpoint_params) # Assemble query parameters - query_params: Dict[str, Union[str, int]] = {"token": self.connection.token["access_token"]} + query_params: dict[str, Union[str, int]] = {"token": self.connection.token["access_token"]} if copy_to_sandbox: query_params["copyToSandbox"] = "true" if static_values: @@ -245,12 +245,12 @@ def upload( def action( self, - file_ids: Union[str, List[str]], + file_ids: Union[str, list[str]], match: bool = False, export: bool = False, export_filename_suffix: Optional[str] = None, copy_to_sandbox: bool = False, - ) -> List[dict]: + ) -> list[dict]: """Perform actions on existing files. All files must be in Finished status (if the action requested is publish), and @@ -258,7 +258,7 @@ def action( action has been queued. 
`Args:` - file_ids: str or List[str] + file_ids: str or list[str] one or more file_ids (found in the `id` key of responses from the upload() or status() methods) match: bool @@ -408,7 +408,7 @@ def validate_table(self, table: Table, template_id: str = "48827") -> None: "matchbackid", ] - required_columns: List[str] = ["first_name", "last_name"] + required_columns: list[str] = ["first_name", "last_name"] actual_table_columns = table.columns unexpected_columns = [ diff --git a/parsons/census/census.py b/parsons/census/census.py index 5798698b53..bafada70eb 100644 --- a/parsons/census/census.py +++ b/parsons/census/census.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class Census(object): +class Census: """ Class that creates a connector to the Census Bureau API """ diff --git a/parsons/civis/civisclient.py b/parsons/civis/civisclient.py index 61d689aba6..a5e276a0d9 100644 --- a/parsons/civis/civisclient.py +++ b/parsons/civis/civisclient.py @@ -4,7 +4,7 @@ from parsons.utilities import check_env -class CivisClient(object): +class CivisClient: """ Instantiate the Civis class. diff --git a/parsons/community/community.py b/parsons/community/community.py index a327bf78bb..b5b10fbf37 100644 --- a/parsons/community/community.py +++ b/parsons/community/community.py @@ -9,7 +9,7 @@ COMMUNITY_API_ENDPOINT = "https://dl.community.com/download/v1/files/" -class Community(object): +class Community: """ Instantiate class. diff --git a/parsons/controlshift/controlshift.py b/parsons/controlshift/controlshift.py index 3164037b25..6381182731 100644 --- a/parsons/controlshift/controlshift.py +++ b/parsons/controlshift/controlshift.py @@ -3,7 +3,7 @@ from parsons.utilities.oauth_api_connector import OAuth2APIConnector -class Controlshift(object): +class Controlshift: """ Instantiate the Controlshift class. Requires an API Application integration. 
For more info on setup, see: diff --git a/parsons/copper/copper.py b/parsons/copper/copper.py index 06fe46d5ce..723385be39 100644 --- a/parsons/copper/copper.py +++ b/parsons/copper/copper.py @@ -13,7 +13,7 @@ COPPER_URI = "https://api.prosperworks.com/developer_api/v1" -class Copper(object): +class Copper: """ Instantiate Copper Class diff --git a/parsons/crowdtangle/crowdtangle.py b/parsons/crowdtangle/crowdtangle.py index 38ee78a21b..eef4f72b63 100644 --- a/parsons/crowdtangle/crowdtangle.py +++ b/parsons/crowdtangle/crowdtangle.py @@ -13,7 +13,7 @@ REQUEST_SLEEP = 10 # CT has a rather agressive 6 requests per minute rate limit. -class CrowdTangle(object): +class CrowdTangle: """ Instantiate CrowdTangle Class diff --git a/parsons/databases/discover_database.py b/parsons/databases/discover_database.py index dbb05b9e03..6fabe83151 100644 --- a/parsons/databases/discover_database.py +++ b/parsons/databases/discover_database.py @@ -1,5 +1,5 @@ import os -from typing import List, Optional, Type, Union +from typing import Optional, Union from parsons.databases.database_connector import DatabaseConnector from parsons.databases.mysql import MySQL @@ -10,7 +10,7 @@ def discover_database( default_connector: Optional[ - Union[Type[DatabaseConnector], List[Type[DatabaseConnector]]] + Union[type[DatabaseConnector], list[type[DatabaseConnector]]] ] = None, ) -> DatabaseConnector: """Create an appropriate ``DatabaseConnector`` based on environmental variables. @@ -54,7 +54,7 @@ def discover_database( if len(detected) > 1: if default_connector is None: - raise EnvironmentError( + raise OSError( f"Multiple database configurations detected: {detected}." " Please specify a default connector." ) @@ -63,17 +63,17 @@ def discover_database( for connector in default_connector: if connector.__name__ in detected: return connector() - raise EnvironmentError( + raise OSError( f"None of the default connectors {default_connector} were detected." 
) elif default_connector.__name__ in detected: return default_connector() else: - raise EnvironmentError( + raise OSError( f"Default connector {default_connector} not detected. Detected: {detected}." ) elif detected: return connectors[detected[0]]() else: - raise EnvironmentError("Could not find any database configuration.") + raise OSError("Could not find any database configuration.") diff --git a/parsons/databases/postgres/postgres.py b/parsons/databases/postgres/postgres.py index 0de5e69485..043de3f3cf 100644 --- a/parsons/databases/postgres/postgres.py +++ b/parsons/databases/postgres/postgres.py @@ -91,7 +91,7 @@ def copy( sql = f"""COPY "{table_name}" ("{'","'.join(tbl.columns)}") FROM STDIN CSV HEADER;""" with self.cursor(connection) as cursor: - cursor.copy_expert(sql, open(tbl.to_csv(), "r")) + cursor.copy_expert(sql, open(tbl.to_csv())) logger.info(f"{tbl.num_rows} rows copied to {table_name}.") def table(self, table_name): diff --git a/parsons/databases/redshift/redshift.py b/parsons/databases/redshift/redshift.py index b2b6c80db3..b59d3a4a38 100644 --- a/parsons/databases/redshift/redshift.py +++ b/parsons/databases/redshift/redshift.py @@ -5,7 +5,7 @@ import pickle import random from contextlib import contextmanager -from typing import List, Optional +from typing import Optional import petl import psycopg2 @@ -485,7 +485,7 @@ def copy( acceptinvchars: bool = True, dateformat: str = "auto", timeformat: str = "auto", - varchar_max: Optional[List[str]] = None, + varchar_max: Optional[list[str]] = None, truncatecolumns: bool = False, columntypes: Optional[dict] = None, specifycols: Optional[bool] = None, @@ -1037,7 +1037,7 @@ def upsert( noise = f"{random.randrange(0, 10000):04}"[:4] date_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M") # Generate a temp table like "table_tmp_20200210_1230_14212" - staging_tbl = "{}_stg_{}_{}".format(target_table, date_stamp, noise) + staging_tbl = f"{target_table}_stg_{date_stamp}_{noise}" if distinct_check: 
primary_keys_statement = ", ".join(primary_keys) diff --git a/parsons/databases/redshift/rs_copy_table.py b/parsons/databases/redshift/rs_copy_table.py index a545bac507..c8cf7dbfdd 100644 --- a/parsons/databases/redshift/rs_copy_table.py +++ b/parsons/databases/redshift/rs_copy_table.py @@ -9,7 +9,7 @@ S3_TEMP_KEY_PREFIX = "Parsons_RedshiftCopyTable" -class RedshiftCopyTable(object): +class RedshiftCopyTable: aws_access_key_id = None aws_secret_access_key = None iam_role = None @@ -135,9 +135,7 @@ def get_creds(self, aws_access_key_id, aws_secret_access_key): aws_access_key_id = creds.access_key aws_secret_access_key = creds.secret_key - return "credentials 'aws_access_key_id={};aws_secret_access_key={}'\n".format( - aws_access_key_id, aws_secret_access_key - ) + return f"credentials 'aws_access_key_id={aws_access_key_id};aws_secret_access_key={aws_secret_access_key}'\n" def temp_s3_copy( self, @@ -148,10 +146,10 @@ def temp_s3_copy( ): if not self.s3_temp_bucket: raise KeyError( - ( + "Missing S3_TEMP_BUCKET, needed for transferring data to Redshift. 
" "Must be specified as env vars or kwargs" - ) + ) # Coalesce S3 Key arguments diff --git a/parsons/databases/redshift/rs_create_table.py b/parsons/databases/redshift/rs_create_table.py index 3d75138523..8a48e1945f 100644 --- a/parsons/databases/redshift/rs_create_table.py +++ b/parsons/databases/redshift/rs_create_table.py @@ -87,7 +87,7 @@ def create_statement( mapping["type_list"][i] = columntypes[col] # Enclose in quotes - mapping["headers"] = ['"{}"'.format(h) for h in mapping["headers"]] + mapping["headers"] = [f'"{h}"' for h in mapping["headers"]] return self.create_sql(table_name, mapping, distkey=distkey, sortkey=sortkey) @@ -163,7 +163,7 @@ def vc_validate(self, mapping): def create_sql(self, table_name, mapping, distkey=None, sortkey=None): # Generate the sql to create the table - statement = "create table {} (".format(table_name) + statement = f"create table {table_name} (" for i in range(len(mapping["headers"])): if mapping["type_list"][i] == "varchar": @@ -178,14 +178,14 @@ def create_sql(self, table_name, mapping, distkey=None, sortkey=None): statement = statement[:-1] + ") " if distkey: - statement += "\ndistkey({}) ".format(distkey) + statement += f"\ndistkey({distkey}) " if sortkey and isinstance(sortkey, list): statement += "\ncompound sortkey(" statement += ", ".join(sortkey) statement += ")" elif sortkey: - statement += "\nsortkey({})".format(sortkey) + statement += f"\nsortkey({sortkey})" statement += ";" @@ -217,8 +217,8 @@ def _log_key_warning(distkey=None, sortkey=None, method=""): ] warning = "".join( [ - "You didn't provide a {} key to method `parsons.redshift.Redshift.{}`.\n" - "You can learn about best practices here:\n{}.\n".format(keyname, method, keyinfo) + f"You didn't provide a {keyname} key to method `parsons.redshift.Redshift.{method}`.\n" + f"You can learn about best practices here:\n{keyinfo}.\n" for key, keyname, keyinfo in keys if not key ] diff --git a/parsons/databases/redshift/rs_schema.py 
b/parsons/databases/redshift/rs_schema.py index 28a9600f87..37034e4a26 100644 --- a/parsons/databases/redshift/rs_schema.py +++ b/parsons/databases/redshift/rs_schema.py @@ -1,4 +1,4 @@ -class RedshiftSchema(object): +class RedshiftSchema: def schema_exists(self, schema): sql = f"select * from pg_namespace where nspname = '{schema}'" res = self.query(sql) diff --git a/parsons/databases/redshift/rs_table_utilities.py b/parsons/databases/redshift/rs_table_utilities.py index dc46e0a826..22c25b1c4a 100644 --- a/parsons/databases/redshift/rs_table_utilities.py +++ b/parsons/databases/redshift/rs_table_utilities.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) -class RedshiftTableUtilities(object): +class RedshiftTableUtilities: def __init__(self): pass @@ -31,8 +31,8 @@ def table_exists_with_connection(self, table_name, connection, view=True): table_name = [x.strip() for x in table_name] # Check in pg tables for the table - sql = """select count(*) from pg_tables where schemaname='{}' and - tablename='{}';""".format(table_name[0], table_name[1]) + sql = f"""select count(*) from pg_tables where schemaname='{table_name[0]}' and + tablename='{table_name[1]}';""" # TODO maybe convert these queries to use self.query_with_connection @@ -42,8 +42,8 @@ def table_exists_with_connection(self, table_name, connection, view=True): # Check in the pg_views for the table if view: - sql = """select count(*) from pg_views where schemaname='{}' and - viewname='{}';""".format(table_name[0], table_name[1]) + sql = f"""select count(*) from pg_views where schemaname='{table_name[0]}' and + viewname='{table_name[1]}';""" cursor.execute(sql) result += cursor.fetchone()[0] diff --git a/parsons/donorbox/donorbox.py b/parsons/donorbox/donorbox.py index 1adc10d8df..23a73471c5 100644 --- a/parsons/donorbox/donorbox.py +++ b/parsons/donorbox/donorbox.py @@ -10,7 +10,7 @@ URI = "https://donorbox.org/api/v1" -class Donorbox(object): +class Donorbox: """ Instantiate Donorbox class. 
diff --git a/parsons/empower/empower.py b/parsons/empower/empower.py index a54b98ea75..ff05aa3adf 100644 --- a/parsons/empower/empower.py +++ b/parsons/empower/empower.py @@ -10,7 +10,7 @@ EMPOWER_API_ENDPOINT = "https://api.getempower.com/v1/export" -class Empower(object): +class Empower: """ Instantiate class. diff --git a/parsons/etl/etl.py b/parsons/etl/etl.py index cb8f120ff9..16cc3e4bcc 100644 --- a/parsons/etl/etl.py +++ b/parsons/etl/etl.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) -class ETL(object): +class ETL: def __init__(self): pass diff --git a/parsons/etl/tofrom.py b/parsons/etl/tofrom.py index edfc41bcd1..5963faf160 100644 --- a/parsons/etl/tofrom.py +++ b/parsons/etl/tofrom.py @@ -8,7 +8,7 @@ from parsons.utilities import files, zip_archive -class ToFrom(object): +class ToFrom: def to_dataframe(self, index=None, exclude=None, columns=None, coerce_float=False): """ Outputs table as a Pandas Dataframe diff --git a/parsons/facebook_ads/facebook_ads.py b/parsons/facebook_ads/facebook_ads.py index d10e0deed9..842b5d765e 100644 --- a/parsons/facebook_ads/facebook_ads.py +++ b/parsons/facebook_ads/facebook_ads.py @@ -18,7 +18,7 @@ MAX_FB_AUDIENCE_API_USERS = 10000 -class FacebookAds(object): +class FacebookAds: """ Instantiate the FacebookAds class @@ -89,7 +89,7 @@ def __init__(self, app_id=None, app_secret=None, access_token=None, ad_account_i raise error FacebookAdsApi.init(self.app_id, self.app_secret, self.access_token) - self.ad_account = AdAccount("act_%s" % self.ad_account_id) + self.ad_account = AdAccount(f"act_{self.ad_account_id}") @staticmethod def _get_match_key_for_column(column): diff --git a/parsons/formstack/formstack.py b/parsons/formstack/formstack.py index b38681730a..9b0cc01b63 100644 --- a/parsons/formstack/formstack.py +++ b/parsons/formstack/formstack.py @@ -10,7 +10,7 @@ API_URI = "https://www.formstack.com/api/v2" -class Formstack(object): +class Formstack: """ Instantiate Formstack class. 
diff --git a/parsons/geocode/census_geocoder.py b/parsons/geocode/census_geocoder.py index a49c15648e..d567055999 100644 --- a/parsons/geocode/census_geocoder.py +++ b/parsons/geocode/census_geocoder.py @@ -13,7 +13,7 @@ BATCH_SIZE = 999 -class CensusGeocoder(object): +class CensusGeocoder: """ Instantiate the CensusGecoder Class diff --git a/parsons/github/github.py b/parsons/github/github.py index e5e8d49936..da1ee3a650 100644 --- a/parsons/github/github.py +++ b/parsons/github/github.py @@ -54,7 +54,7 @@ class ParsonsGitHubError(Exception): @decorate_methods(wrap_github_404) -class GitHub(object): +class GitHub: """Creates a GitHub class for accessing the GitHub API. Uses ``parsons.utilities.check_env`` to load credentials from environment variables if not diff --git a/parsons/google/google_admin.py b/parsons/google/google_admin.py index b69cd5b3d9..d6e32261b5 100644 --- a/parsons/google/google_admin.py +++ b/parsons/google/google_admin.py @@ -9,7 +9,7 @@ ) -class GoogleAdmin(object): +class GoogleAdmin: """ A connector for Google Admin. 
diff --git a/parsons/google/google_bigquery.py b/parsons/google/google_bigquery.py index 5a930f7585..835bce21ae 100644 --- a/parsons/google/google_bigquery.py +++ b/parsons/google/google_bigquery.py @@ -5,7 +5,7 @@ import random import uuid from contextlib import contextmanager -from typing import List, Optional, Union +from typing import Optional, Union import google import petl @@ -382,7 +382,7 @@ def copy_from_gcs( allow_quoted_newlines: bool = True, allow_jagged_rows: bool = True, quote: Optional[str] = None, - schema: Optional[List[dict]] = None, + schema: Optional[list[dict]] = None, job_config: Optional[LoadJobConfig] = None, force_unzip_blobs: bool = False, compression_type: str = "gzip", @@ -551,7 +551,7 @@ def copy_large_compressed_file_from_gcs( allow_quoted_newlines: bool = True, allow_jagged_rows: bool = True, quote: Optional[str] = None, - schema: Optional[List[dict]] = None, + schema: Optional[list[dict]] = None, job_config: Optional[LoadJobConfig] = None, compression_type: str = "gzip", new_file_extension: str = "csv", @@ -792,7 +792,7 @@ def copy_direct( allow_quoted_newlines: bool = True, allow_jagged_rows: bool = True, quote: Optional[str] = None, - schema: Optional[List[dict]] = None, + schema: Optional[list[dict]] = None, max_timeout: int = 21600, convert_dict_list_columns_to_json: bool = True, **load_kwargs, @@ -884,7 +884,7 @@ def copy( allow_quoted_newlines: bool = True, allow_jagged_rows: bool = True, quote: Optional[str] = None, - schema: Optional[List[dict]] = None, + schema: Optional[list[dict]] = None, max_timeout: int = 21600, convert_dict_list_columns_to_json: bool = True, **load_kwargs, @@ -1368,7 +1368,7 @@ def _get_job_config_schema( parsons_table: Optional[Table] = None, custom_schema: Optional[list] = None, template_table: Optional[str] = None, - ) -> Optional[List[bigquery.SchemaField]]: + ) -> Optional[list[bigquery.SchemaField]]: # if job.schema already set in job_config, do nothing if job_config.schema: return 
job_config.schema diff --git a/parsons/google/google_civic.py b/parsons/google/google_civic.py index 10997f731c..b223a57508 100644 --- a/parsons/google/google_civic.py +++ b/parsons/google/google_civic.py @@ -6,7 +6,7 @@ URI = "https://www.googleapis.com/civicinfo/v2/" -class GoogleCivic(object): +class GoogleCivic: """ `Args:` api_key : str diff --git a/parsons/google/google_cloud_storage.py b/parsons/google/google_cloud_storage.py index 73fb5ac074..ab065e02ba 100644 --- a/parsons/google/google_cloud_storage.py +++ b/parsons/google/google_cloud_storage.py @@ -20,7 +20,7 @@ logger = logging.getLogger(__name__) -class GoogleCloudStorage(object): +class GoogleCloudStorage: """Google Cloud Storage connector utility This class requires application credentials in the form of a diff --git a/parsons/google/utilities.py b/parsons/google/utilities.py index d2a9730d65..b3259ef2b8 100644 --- a/parsons/google/utilities.py +++ b/parsons/google/utilities.py @@ -1,6 +1,6 @@ import json import os -import typing as t +from typing import Optional, Union import google from google.oauth2 import service_account @@ -9,9 +9,9 @@ def setup_google_application_credentials( - app_creds: t.Union[t.Dict, str, None], + app_creds: Union[dict, str, None], env_var_name: str = "GOOGLE_APPLICATION_CREDENTIALS", - target_env_var_name: t.Optional[str] = None, + target_env_var_name: Optional[str] = None, ) -> None: # Detect if app_creds is a dict, path string or json string, and if it is a # json string, then convert it to a temporary file. 
Then set the @@ -59,8 +59,8 @@ def hexavigesimal(n: int) -> str: def load_google_application_credentials( env_var_name: str = "GOOGLE_APPLICATION_CREDENTIALS", - scopes: t.Optional[t.List[str]] = None, - subject: t.Optional[str] = None, + scopes: Optional[list[str]] = None, + subject: Optional[str] = None, ) -> google.auth.credentials.Credentials: service_account_filepath = os.environ[env_var_name] diff --git a/parsons/hustle/hustle.py b/parsons/hustle/hustle.py index 14653fd1c0..0bc5cbdf7c 100644 --- a/parsons/hustle/hustle.py +++ b/parsons/hustle/hustle.py @@ -1,6 +1,6 @@ import logging from datetime import datetime, timedelta -from typing import Dict, NoReturn, Optional, Union +from typing import NoReturn, Optional, Union from requests import Response, request @@ -14,7 +14,7 @@ PAGE_LIMIT = 1000 -class Hustle(object): +class Hustle: """ Instantiate Hustle Class @@ -73,10 +73,10 @@ def _request( self, endpoint: str, req_type: str = "GET", - args: Optional[Dict] = None, - payload: Optional[Dict] = None, + args: Optional[dict] = None, + payload: Optional[dict] = None, raise_on_error: bool = True, - ) -> Union[Dict, list]: + ) -> Union[dict, list]: url = self.uri + endpoint self._refresh_token() @@ -142,7 +142,7 @@ def get_agents(self, group_id: str) -> Table: logger.info(f"Got {tbl.num_rows} agents from {group_id} group.") return tbl - def get_agent(self, agent_id: str) -> Dict: + def get_agent(self, agent_id: str) -> dict: """ Get a single agent. @@ -165,7 +165,7 @@ def create_agent( phone_number: str, send_invite: bool = False, email: Optional[str] = None, - ) -> Dict: + ) -> dict: """ Create an agent. @@ -207,7 +207,7 @@ def update_agent( name: Optional[str] = None, full_name: Optional[str] = None, send_invite: bool = False, - ) -> Dict: + ) -> dict: """ Update an agent. 
@@ -248,7 +248,7 @@ def get_organizations(self) -> Table: logger.info(f"Got {tbl.num_rows} organizations.") return tbl - def get_organization(self, organization_id: str) -> Dict: + def get_organization(self, organization_id: str) -> dict: """ Get a single organization. @@ -278,7 +278,7 @@ def get_groups(self, organization_id: str) -> Table: logger.info(f"Got {tbl.num_rows} groups.") return tbl - def get_group(self, group_id: str) -> Dict: + def get_group(self, group_id: str) -> dict: """ Get a single group. @@ -291,7 +291,7 @@ def get_group(self, group_id: str) -> Dict: logger.info(f"Got {group_id} group.") return resp # type: ignore - def create_group_membership(self, group_id: str, lead_id: str) -> Dict: + def create_group_membership(self, group_id: str, lead_id: str) -> dict: """ Add a lead to a group. @@ -309,7 +309,7 @@ def create_group_membership(self, group_id: str, lead_id: str) -> Dict: ) return resp # type: ignore - def get_lead(self, lead_id: str) -> Dict: + def get_lead(self, lead_id: str) -> dict: """ Get a single lead. @@ -367,9 +367,9 @@ def create_lead( email: Optional[str] = None, notes: Optional[str] = None, follow_up: Optional[str] = None, - custom_fields: Optional[Dict] = None, + custom_fields: Optional[dict] = None, tag_ids: Optional[list] = None, - ) -> Dict: + ) -> dict: """ Create a lead. @@ -463,7 +463,7 @@ def create_leads(self, table: Table, group_id: Optional[str] = None) -> Table: created_leads = [] for row in table: - lead: Dict[str, Optional[Union[str, Dict]]] = {"group_id": group_id} + lead: dict[str, Optional[Union[str, dict]]] = {"group_id": group_id} custom_fields = {} # Check for column names that map to arguments, if not assign @@ -497,7 +497,7 @@ def update_lead( notes: Optional[str] = None, follow_up: Optional[str] = None, tag_ids: Optional[list] = None, - ) -> Dict: + ) -> dict: """ Update a lead. 
@@ -556,7 +556,7 @@ def get_tags(self, organization_id: str) -> Table: logger.info(f"Got {tbl.num_rows} tags for {organization_id} organization.") return tbl - def get_tag(self, tag_id: str) -> Dict: + def get_tag(self, tag_id: str) -> dict: """ Get a single tag. @@ -588,7 +588,7 @@ def get_custom_fields(self, organization_id: str) -> Table: def create_custom_field( self, organization_id: str, name: str, agent_visible: Optional[bool] = None - ) -> Dict: + ) -> dict: """Create a custom field. `Args:` @@ -603,7 +603,7 @@ def create_custom_field( The newly created custom field """ - custom_field: Dict[str, Union[str, bool]] = {"name": name} + custom_field: dict[str, Union[str, bool]] = {"name": name} if agent_visible is not None: custom_field["agentVisible"] = agent_visible diff --git a/parsons/mobilize_america/ma.py b/parsons/mobilize_america/ma.py index ce947ed0d7..470776c739 100644 --- a/parsons/mobilize_america/ma.py +++ b/parsons/mobilize_america/ma.py @@ -14,7 +14,7 @@ MA_URI = "https://api.mobilize.us/v1/" -class MobilizeAmerica(object): +class MobilizeAmerica: """ Instantiate MobilizeAmerica Class diff --git a/parsons/nation_builder/nation_builder.py b/parsons/nation_builder/nation_builder.py index 6088a65038..47374ee1d1 100644 --- a/parsons/nation_builder/nation_builder.py +++ b/parsons/nation_builder/nation_builder.py @@ -1,7 +1,7 @@ import json import logging import time -from typing import Any, Dict, Optional, Tuple, cast +from typing import Any, Optional, cast from urllib.parse import parse_qs, urlparse from parsons import Table @@ -48,7 +48,7 @@ def get_uri(cls, slug: Optional[str]) -> str: return f"https://{slug}.nationbuilder.com/api/v1" @classmethod - def get_auth_headers(cls, access_token: Optional[str]) -> Dict[str, str]: + def get_auth_headers(cls, access_token: Optional[str]) -> dict[str, str]: if access_token is None: raise ValueError("access_token can't None") @@ -61,7 +61,7 @@ def get_auth_headers(cls, access_token: Optional[str]) -> Dict[str, 
str]: return {"authorization": f"Bearer {access_token}"} @classmethod - def parse_next_params(cls, next_value: str) -> Tuple[str, str]: + def parse_next_params(cls, next_value: str) -> tuple[str, str]: next_params = parse_qs(urlparse(next_value).query) if "__nonce" not in next_params: @@ -91,7 +91,7 @@ def get_people(self) -> Table: while True: try: - logging.debug("sending request %s" % url) + logging.debug(f"sending request {url}") response = self.client.get_request(url) res = response.get("results", None) @@ -99,7 +99,7 @@ def get_people(self) -> Table: if res is None: break - logging.debug("response got %s records" % len(res)) + logging.debug(f"response got {len(res)} records") data.extend(res) @@ -109,15 +109,15 @@ def get_people(self) -> Table: else: break except Exception as error: - logging.error("error requesting data from Nation Builder: %s" % error) + logging.error(f"error requesting data from Nation Builder: {error}") wait_time = 30 - logging.info("waiting %d seconds before retrying" % wait_time) + logging.info("waiting %s seconds before retrying", wait_time) time.sleep(wait_time) return Table(data) - def update_person(self, person_id: str, person: Dict[str, Any]) -> Dict[str, Any]: + def update_person(self, person_id: str, person: dict[str, Any]) -> dict[str, Any]: """ This method updates a person with the provided id to have the provided data. It returns a full representation of the updated person.
@@ -146,11 +146,11 @@ def update_person(self, person_id: str, person: Dict[str, Any]) -> Dict[str, Any url = f"people/{person_id}" response = self.client.put_request(url, data=json.dumps({"person": person})) - response = cast("Dict[str, Any]", response) + response = cast("dict[str, Any]", response) return response - def upsert_person(self, person: Dict[str, Any]) -> Tuple[bool, Optional[Dict[str, Any]]]: + def upsert_person(self, person: dict[str, Any]) -> tuple[bool, Optional[dict[str, Any]]]: """ Updates a matched person or creates a new one if the person doesn't exist. diff --git a/parsons/newmode/newmode.py b/parsons/newmode/newmode.py index 5292eae8a9..c6b9573fda 100644 --- a/parsons/newmode/newmode.py +++ b/parsons/newmode/newmode.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from Newmode import Client from oauthlib.oauth2 import TokenExpiredError @@ -54,7 +54,7 @@ def __init__( self.api_version: Optional[str] = api_version self.client: Client = Client(api_user, api_password, api_version) - def convert_to_table(self, data: Union[List[Dict[str, Any]], Dict[str, Any]]) -> Table: + def convert_to_table(self, data: Union[list[dict[str, Any]], dict[str, Any]]) -> Table: # Internal method to create a Parsons table from a data element. table = None if isinstance(data, list): @@ -64,7 +64,7 @@ def convert_to_table(self, data: Union[List[Dict[str, Any]], Dict[str, Any]]) -> return table - def get_tools(self, params: Dict[str, Any] = None) -> Table: + def get_tools(self, params: dict[str, Any] = None) -> Table: """ Get existing tools. 
Args: @@ -83,8 +83,8 @@ def get_tools(self, params: Dict[str, Any] = None) -> Table: return self.convert_to_table([]) def get_tool( - self, tool_id: Union[int, str], params: Optional[Dict[str, Any]] = None - ) -> Optional[Dict[str, Any]]: + self, tool_id: Union[int, str], params: Optional[dict[str, Any]] = None + ) -> Optional[dict[str, Any]]: """ Get specific tool. Args: @@ -108,7 +108,7 @@ def lookup_targets( self, tool_id: Union[int, str], search: Optional[str] = None, - params: Optional[Dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, ) -> Table: """ Lookup targets for a given tool @@ -141,8 +141,8 @@ def lookup_targets( return self.convert_to_table([]) def get_action( - self, tool_id: Union[int, str], params: Optional[Dict[str, Any]] = None - ) -> Optional[Dict[str, Any]]: + self, tool_id: Union[int, str], params: Optional[dict[str, Any]] = None + ) -> Optional[dict[str, Any]]: """ Get the action information for a given tool. Args: @@ -165,8 +165,8 @@ def get_action( def run_action( self, tool_id: Union[int, str], - payload: Dict[str, Any], - params: Optional[Dict[str, Any]] = None, + payload: dict[str, Any], + params: Optional[dict[str, Any]] = None, ) -> Optional[Union[str, int]]: """ Run specific action with given payload. @@ -194,8 +194,8 @@ def run_action( return None def get_target( - self, target_id: Union[int, str], params: Optional[Dict[str, Any]] = None - ) -> Optional[Dict[str, Any]]: + self, target_id: Union[int, str], params: Optional[dict[str, Any]] = None + ) -> Optional[dict[str, Any]]: """ Get specific target. 
Args: @@ -215,7 +215,7 @@ def get_target( logging.warning("Empty target returned") return None - def get_targets(self, params: Optional[Dict[str, Any]] = None) -> Optional[Table]: + def get_targets(self, params: Optional[dict[str, Any]] = None) -> Optional[Table]: """ Get all targets @@ -238,7 +238,7 @@ def get_targets(self, params: Optional[Dict[str, Any]] = None) -> Optional[Table logging.warning("No targets returned") return None - def get_campaigns(self, params: Optional[Dict[str, Any]] = None) -> Table: + def get_campaigns(self, params: Optional[dict[str, Any]] = None) -> Table: """ Get existing campaigns. Args: @@ -257,8 +257,8 @@ def get_campaigns(self, params: Optional[Dict[str, Any]] = None) -> Table: return self.convert_to_table([]) def get_campaign( - self, campaign_id: Union[int, str], params: Optional[Dict[str, Any]] = None - ) -> Optional[Dict[str, Any]]: + self, campaign_id: Union[int, str], params: Optional[dict[str, Any]] = None + ) -> Optional[dict[str, Any]]: """ Get specific campaign. Args: @@ -278,7 +278,7 @@ def get_campaign( logging.warning("Empty campaign returned") return None - def get_organizations(self, params: Optional[Dict[str, Any]] = None) -> Table: + def get_organizations(self, params: Optional[dict[str, Any]] = None) -> Table: """ Get existing organizations. Args: @@ -297,8 +297,8 @@ def get_organizations(self, params: Optional[Dict[str, Any]] = None) -> Table: return self.convert_to_table([]) def get_organization( - self, organization_id: Union[int, str], params: Optional[Dict[str, Any]] = None - ) -> Optional[Dict[str, Any]]: + self, organization_id: Union[int, str], params: Optional[dict[str, Any]] = None + ) -> Optional[dict[str, Any]]: """ Get specific organization. 
Args: @@ -318,7 +318,7 @@ def get_organization( logging.warning("Empty organization returned") return None - def get_services(self, params: Optional[Dict[str, Any]] = None) -> Table: + def get_services(self, params: Optional[dict[str, Any]] = None) -> Table: """ Get existing services. Args: @@ -337,8 +337,8 @@ def get_services(self, params: Optional[Dict[str, Any]] = None) -> Table: return self.convert_to_table([]) def get_service( - self, service_id: Union[int, str], params: Optional[Dict[str, Any]] = None - ) -> Optional[Dict[str, Any]]: + self, service_id: Union[int, str], params: Optional[dict[str, Any]] = None + ) -> Optional[dict[str, Any]]: """ Get specific service. Args: @@ -359,7 +359,7 @@ def get_service( return None def get_outreaches( - self, tool_id: Union[int, str], params: Optional[Dict[str, Any]] = None + self, tool_id: Union[int, str], params: Optional[dict[str, Any]] = None ) -> Table: """ Get existing outreaches for a given tool. @@ -381,8 +381,8 @@ def get_outreaches( return self.convert_to_table([]) def get_outreach( - self, outreach_id: Union[int, str], params: Optional[Dict[str, Any]] = None - ) -> Optional[Dict[str, Any]]: + self, outreach_id: Union[int, str], params: Optional[dict[str, Any]] = None + ) -> Optional[dict[str, Any]]: """ Get specific outreach. 
Args: @@ -429,7 +429,7 @@ def __init__( self.base_url: str = V2_API_URL self.client_id: str = check_env.check("NEWMODE_API_CLIENT_ID", client_id) self.client_secret: str = check_env.check("NEWMODE_API_CLIENT_SECRET", client_secret) - self.headers: Dict[str, str] = {"content-type": "application/json"} + self.headers: dict[str, str] = {"content-type": "application/json"} self.default_client: OAuth2APIConnector = self.get_default_oauth_client() def get_default_oauth_client(self) -> OAuth2APIConnector: @@ -445,7 +445,7 @@ def get_default_oauth_client(self) -> OAuth2APIConnector: def checked_response( self, response: Any, client: OAuth2APIConnector - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: response.raise_for_status() success_codes = [200, 201, 202, 204] client.validate_response(response) @@ -462,11 +462,11 @@ def base_request( method: str, url: str, client: OAuth2APIConnector, - data: Optional[Dict[str, Any]] = None, - json: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, + json: Optional[dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, retries: int = 2, - ) -> Optional[Union[Dict[str, Any], None]]: + ) -> Optional[Union[dict[str, Any], None]]: """ Internal method to instantiate OAuth2APIConnector class, make a single call to Newmode API, and validate the response. @@ -498,13 +498,13 @@ def paginate_request( endpoint: str, client: OAuth2APIConnector, data_key: str = RESPONSE_DATA_KEY, - data: Optional[Dict[str, Any]] = None, - json: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, + json: Optional[dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, supports_version: bool = True, override_api_version: Optional[str] = None, retries: int = 2, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """ Wrapper method to handle pagination for API requests. 
""" @@ -539,14 +539,14 @@ def converted_request( endpoint: str, method: str, supports_version: bool = True, - data: Optional[Dict[str, Any]] = None, - json: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, + json: Optional[dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, convert_to_table: bool = True, data_key: Optional[str] = None, client: Optional[OAuth2APIConnector] = None, override_api_version: Optional[str] = None, - ) -> Union[Table, Dict[str, Any]]: + ) -> Union[Table, dict[str, Any]]: """Internal method to make a call to the Newmode API and convert the result to a Parsons table.""" if params is None: @@ -569,7 +569,7 @@ def converted_request( else: return response - def get_campaign(self, campaign_id: str, params: Optional[Dict[str, Any]] = None) -> Table: + def get_campaign(self, campaign_id: str, params: Optional[dict[str, Any]] = None) -> Table: """ Retrieve a specific campaign by ID. @@ -592,7 +592,7 @@ def get_campaign(self, campaign_id: str, params: Optional[Dict[str, Any]] = None ) return data - def get_campaign_ids(self, params: Optional[Dict[str, Any]] = None) -> List[str]: + def get_campaign_ids(self, params: Optional[dict[str, Any]] = None) -> list[str]: """ Retrieve all campaigns In v2, a campaign is equivalent to Tools or Actions in V1. 
@@ -634,7 +634,7 @@ def get_recipient( city: Optional[str] = None, postal_code: Optional[str] = None, region: Optional[str] = None, - params: Optional[Dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, ) -> Table: """ Retrieve a specific recipient by ID @@ -679,10 +679,10 @@ def get_recipient( def run_submit( self, campaign_id: str, - json: Optional[Dict[str, Any]] = None, - data: Optional[Dict[str, Any]] = None, - params: Optional[Dict[str, Any]] = None, - ) -> Dict[str, Any]: + json: Optional[dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, + ) -> dict[str, Any]: """ Pass a submission from a supporter to a campaign that ultimately fills in a petition, @@ -710,7 +710,7 @@ def run_submit( ) return response[0] - def get_submissions(self, campaign_id: str, params: Optional[Dict[str, Any]] = None) -> Table: + def get_submissions(self, campaign_id: str, params: Optional[dict[str, Any]] = None) -> Table: """ Retrieve and sort submissions and contact data for a specified campaign using a range of filters diff --git a/parsons/ngpvan/activist_codes.py b/parsons/ngpvan/activist_codes.py index cfee2d7789..e582f874ee 100644 --- a/parsons/ngpvan/activist_codes.py +++ b/parsons/ngpvan/activist_codes.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -class ActivistCodes(object): +class ActivistCodes: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/bulk_import.py b/parsons/ngpvan/bulk_import.py index 242990809a..6f9b41d6eb 100644 --- a/parsons/ngpvan/bulk_import.py +++ b/parsons/ngpvan/bulk_import.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -class BulkImport(object): +class BulkImport: def __init__(self): pass diff --git a/parsons/ngpvan/canvass_responses.py b/parsons/ngpvan/canvass_responses.py index f4356b7197..adb2d576ce 100644 --- a/parsons/ngpvan/canvass_responses.py +++ b/parsons/ngpvan/canvass_responses.py @@ -7,7 +7,7 @@ logger = 
logging.getLogger(__name__) -class CanvassResponses(object): +class CanvassResponses: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/changed_entities.py b/parsons/ngpvan/changed_entities.py index d85455e6cc..844623b6f2 100644 --- a/parsons/ngpvan/changed_entities.py +++ b/parsons/ngpvan/changed_entities.py @@ -10,7 +10,7 @@ RETRY_RATE = 10 -class ChangedEntities(object): +class ChangedEntities: def __init__(self): pass diff --git a/parsons/ngpvan/codes.py b/parsons/ngpvan/codes.py index 7d5d7bb6d2..aa9a1380c0 100644 --- a/parsons/ngpvan/codes.py +++ b/parsons/ngpvan/codes.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class Codes(object): +class Codes: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/contact_notes.py b/parsons/ngpvan/contact_notes.py index 038b6f2e24..5d5867e49f 100644 --- a/parsons/ngpvan/contact_notes.py +++ b/parsons/ngpvan/contact_notes.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class ContactNotes(object): +class ContactNotes: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/email.py b/parsons/ngpvan/email.py index 792d6f3db6..45abb2da03 100644 --- a/parsons/ngpvan/email.py +++ b/parsons/ngpvan/email.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) -class Email(object): +class Email: """ Instantiate the Email class. 
diff --git a/parsons/ngpvan/events.py b/parsons/ngpvan/events.py index 3931ac14b4..c75948d145 100644 --- a/parsons/ngpvan/events.py +++ b/parsons/ngpvan/events.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class Events(object): +class Events: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/introspection.py b/parsons/ngpvan/introspection.py index 3b7732fb5b..dd67bffe83 100644 --- a/parsons/ngpvan/introspection.py +++ b/parsons/ngpvan/introspection.py @@ -3,7 +3,7 @@ logger = logging.getLogger(__name__) -class Introspection(object): +class Introspection: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/locations.py b/parsons/ngpvan/locations.py index 6518eece5f..45db45e990 100644 --- a/parsons/ngpvan/locations.py +++ b/parsons/ngpvan/locations.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class Locations(object): +class Locations: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/people.py b/parsons/ngpvan/people.py index 90c9aac38a..f0f21e3e23 100644 --- a/parsons/ngpvan/people.py +++ b/parsons/ngpvan/people.py @@ -1,12 +1,12 @@ import logging -from typing import Dict, List, Union +from typing import Union from parsons.utilities import json_format logger = logging.getLogger(__name__) -class People(object): +class People: def __init__(self, van_connection): self.connection = van_connection @@ -197,7 +197,7 @@ def upsert_person( first_name=None, last_name=None, date_of_birth=None, - email: Union[str, List[Dict[str, Union[str, bool]]], None] = None, + email: Union[str, list[dict[str, Union[str, bool]]], None] = None, phone=None, phone_type=None, street_number=None, @@ -227,7 +227,7 @@ def upsert_person( The person's last name dob: str ISO 8601 formatted date of birth (e.g. 
``1981-02-01``) - email: Union[str, List[Dict[str, Union[str, bool]]], None] + email: Union[str, list[dict[str, Union[str, bool]]], None] The person's email address or a list of email dicts. e.g. [{'email': 'abcd@gmail.com', 'isSubscribed': False}] See https://docs.everyaction.com/reference/people-common-models#email @@ -300,7 +300,7 @@ def _people_search( first_name=None, last_name=None, date_of_birth=None, - email: Union[str, List[Dict[str, Union[str, bool]]], None] = None, + email: Union[str, list[dict[str, Union[str, bool]]], None] = None, phone=None, phone_type="H", street_number=None, diff --git a/parsons/ngpvan/printed_lists.py b/parsons/ngpvan/printed_lists.py index 6152e2e4cf..696efd7971 100644 --- a/parsons/ngpvan/printed_lists.py +++ b/parsons/ngpvan/printed_lists.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class PrintedLists(object): +class PrintedLists: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/saved_lists.py b/parsons/ngpvan/saved_lists.py index c678836daa..2275c86d4d 100644 --- a/parsons/ngpvan/saved_lists.py +++ b/parsons/ngpvan/saved_lists.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) -class SavedLists(object): +class SavedLists: def __init__(self, van_connection): self.connection = van_connection @@ -275,7 +275,7 @@ def upload_saved_list( return r -class Folders(object): +class Folders: def __init__(self, van_connection): # Some sort of test if the van_connection is not present. 
@@ -311,7 +311,7 @@ def get_folder(self, folder_id): return r -class ExportJobs(object): +class ExportJobs: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/scores.py b/parsons/ngpvan/scores.py index 2ea01aa97f..b786cbf571 100644 --- a/parsons/ngpvan/scores.py +++ b/parsons/ngpvan/scores.py @@ -11,7 +11,7 @@ logger = logging.getLogger(__name__) -class Scores(object): +class Scores: def __init__(self, van_connection): self.connection = van_connection @@ -228,7 +228,7 @@ def upload_scores( return r["jobId"] -class FileLoadingJobs(object): +class FileLoadingJobs: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/signups.py b/parsons/ngpvan/signups.py index 32b28cedb9..5fcbf04a1b 100644 --- a/parsons/ngpvan/signups.py +++ b/parsons/ngpvan/signups.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class Signups(object): +class Signups: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/supporter_groups.py b/parsons/ngpvan/supporter_groups.py index be68e2dad7..6eb84fae31 100644 --- a/parsons/ngpvan/supporter_groups.py +++ b/parsons/ngpvan/supporter_groups.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class SupporterGroups(object): +class SupporterGroups: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/survey_questions.py b/parsons/ngpvan/survey_questions.py index 58574d7ce9..0be5dbf7e6 100644 --- a/parsons/ngpvan/survey_questions.py +++ b/parsons/ngpvan/survey_questions.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class SurveyQuestions(object): +class SurveyQuestions: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/targets.py b/parsons/ngpvan/targets.py index a5630ee732..d3520d7e62 100644 --- a/parsons/ngpvan/targets.py +++ b/parsons/ngpvan/targets.py @@ -13,7 +13,7 @@ class TargetsFailed(Exception): pass 
-class Targets(object): +class Targets: def __init__(self, van_connection): self.connection = van_connection diff --git a/parsons/ngpvan/van_connector.py b/parsons/ngpvan/van_connector.py index f9937ee69d..ea24a4b97d 100644 --- a/parsons/ngpvan/van_connector.py +++ b/parsons/ngpvan/van_connector.py @@ -11,7 +11,7 @@ SOAP_URI = "https://api.securevan.com/Services/V3/ListService.asmx?WSDL" -class VANConnector(object): +class VANConnector: def __init__(self, api_key=None, auth_name="default", db=None): self.api_key = check_env.check("VAN_API_KEY", api_key) diff --git a/parsons/notifications/slack.py b/parsons/notifications/slack.py index 6c0e616ec3..69e1c01434 100644 --- a/parsons/notifications/slack.py +++ b/parsons/notifications/slack.py @@ -10,7 +10,7 @@ from parsons.utilities.check_env import check -class Slack(object): +class Slack: def __init__(self, api_key=None): if api_key is None: try: diff --git a/parsons/phone2action/p2a.py b/parsons/phone2action/p2a.py index b24e4b2e81..002eb7ec35 100644 --- a/parsons/phone2action/p2a.py +++ b/parsons/phone2action/p2a.py @@ -5,7 +5,7 @@ logger = logging.getLogger(__name__) -class Phone2Action(object): +class Phone2Action: """ Instantiate Phone2Action Class diff --git a/parsons/quickbase/quickbase.py b/parsons/quickbase/quickbase.py index 066c1155d3..a4928ec011 100644 --- a/parsons/quickbase/quickbase.py +++ b/parsons/quickbase/quickbase.py @@ -7,7 +7,7 @@ logger = logging.getLogger(__name__) -class Quickbase(object): +class Quickbase: """ Instantiate the Quickbase class diff --git a/parsons/redash/redash.py b/parsons/redash/redash.py index a19490023d..9edb8d31af 100644 --- a/parsons/redash/redash.py +++ b/parsons/redash/redash.py @@ -18,7 +18,7 @@ class RedashQueryFailed(Exception): pass -class Redash(object): +class Redash: """ Instantiate Redash Class @@ -67,7 +67,7 @@ def _poll_job(self, session, job, query_id): response_json = response.json() job = response_json.get( "job", - {"status": "Error NO JOB IN RESPONSE: 
{}".format(json.dumps(response_json))}, + {"status": f"Error NO JOB IN RESPONSE: {json.dumps(response_json)}"}, ) logger.debug( "poll url:%s id:%s status:%s err:%s", @@ -162,7 +162,7 @@ def get_fresh_query_results(self, query_id=None, params=None): query_id = check("REDASH_QUERY_ID", query_id, optional=True) params_from_env = check("REDASH_QUERY_PARAMS", "", optional=True) redash_params = ( - {"p_%s" % k: str(v).replace("'", "''") for k, v in params.items()} if params else {} + {f"p_{k}": str(v).replace("'", "''") for k, v in params.items()} if params else {} ) response = self.session.post( diff --git a/parsons/scytl/scytl.py b/parsons/scytl/scytl.py index bb515bba34..2f239738cc 100644 --- a/parsons/scytl/scytl.py +++ b/parsons/scytl/scytl.py @@ -1,9 +1,9 @@ import csv -import typing as t import zipfile from dataclasses import dataclass from datetime import datetime from io import BytesIO, StringIO +from typing import Optional import defusedxml.ElementTree as ET import requests @@ -93,7 +93,7 @@ def __init__(self, state: str, election_id: str, county=""): self.previous_county_details_list = None self.previously_fetched_counties = set() - def _parse_date_to_utc(self, input_dt: str) -> datetime: + def _parse_date_to_utc(self, input_dt: str) -> Optional[datetime]: """ Parse datetime string as datetime in UTC @@ -105,7 +105,7 @@ def _parse_date_to_utc(self, input_dt: str) -> datetime: """ if input_dt is None: - return + return None temp = parsedate(input_dt, tzinfos=TZ_INFO) temp = temp.astimezone(timezone("UTC")) @@ -161,7 +161,7 @@ def _parse_file_from_zip_url(self, zipfile_url: str, file_name: str) -> bytes: def _get_latest_counties_scytl_info( self, state: str, election_id: str, version_num: str - ) -> t.Dict[str, CountyDetails]: + ) -> dict[str, CountyDetails]: """ Fetch the settings JSON file for the election and parse the county details for participating counties in a state election. 
@@ -212,7 +212,7 @@ def _get_latest_counties_scytl_info( def _parse_county_xml_data_to_precincts( self, county_data: bytes, county_details: CountyDetails - ) -> t.List[t.Dict]: + ) -> list[dict]: """ Parse a detail XML file for a county into a list of election results by precinct and vote method. @@ -295,7 +295,7 @@ def _parse_county_xml_data_to_precincts( return precinct_votes - def _parse_state_xml_data_to_counties(self, state_data: bytes, state: str) -> t.List[t.Dict]: + def _parse_state_xml_data_to_counties(self, state_data: bytes, state: str) -> list[dict]: """ Parse a detail XML file for a state into a list of election results by county and vote method. @@ -373,7 +373,7 @@ def _parse_state_xml_data_to_counties(self, state_data: bytes, state: str) -> t. def _fetch_and_parse_summary_results( self, administrator: str, election_id: str, version_num: str, county="" - ) -> t.List[t.Dict]: + ) -> list[dict]: """ Fetches the summary results CSV file from the Scytl site and parses it into a list of election results by candidate. @@ -424,7 +424,7 @@ def _fetch_and_parse_summary_results( return data - def get_summary_results(self, force_update=False) -> t.List[t.Dict]: + def get_summary_results(self, force_update=False) -> Optional[list[dict]]: """ Fetch the latest summary results for the given election, across all contests. 
@@ -459,7 +459,7 @@ def get_summary_results(self, force_update=False) -> t.List[t.Dict]: version_num = self._get_version(self.administrator, self.election_id) if not force_update and version_num == self.previous_summary_version_num: - return + return None data = self._fetch_and_parse_summary_results( self.administrator, self.election_id, version_num @@ -469,7 +469,7 @@ def get_summary_results(self, force_update=False) -> t.List[t.Dict]: return data - def get_detailed_results(self, force_update=False) -> t.List[t.Dict]: + def get_detailed_results(self, force_update=False) -> Optional[list[dict]]: """ Fetch the latest detailed results by geography for the given election, across all contests. @@ -527,7 +527,7 @@ def get_detailed_results(self, force_update=False) -> t.List[t.Dict]: version_num = self._get_version(self.administrator, self.election_id) if not force_update and version_num == self.previous_details_version_num: - return + return None detail_xml_url = DETAIL_XML_ZIP_URL_TEMPLATE.format( administrator=self.administrator, @@ -535,8 +535,6 @@ def get_detailed_results(self, force_update=False) -> t.List[t.Dict]: version_num=version_num, ) - parsed_data = [] - county_data = self._parse_file_from_zip_url(detail_xml_url, "detail.xml") if self.county: @@ -551,8 +549,8 @@ def get_detailed_results(self, force_update=False) -> t.List[t.Dict]: return parsed_data def get_detailed_results_for_participating_counties( - self, county_names: t.List[str] = None, force_update=False - ) -> t.Tuple[t.List[str], t.List[t.Dict]]: + self, county_names: list[str] = None, force_update=False + ) -> tuple[list[str], list[dict]]: """ Fetch the latest detailed results for the given election for all participating counties with detailed results, across all contests. 
diff --git a/parsons/sftp/sftp.py b/parsons/sftp/sftp.py index 426f6016bb..a141021245 100644 --- a/parsons/sftp/sftp.py +++ b/parsons/sftp/sftp.py @@ -13,7 +13,7 @@ logger = logging.getLogger(__name__) -class SFTP(object): +class SFTP: """ Instantiate SFTP Class @@ -513,9 +513,9 @@ def walk_tree( if max_depth > 3: logger.warning( - "Calling `walk_tree` with `max_depth` {}. " + f"Calling `walk_tree` with `max_depth` {max_depth}. " "Recursively walking a remote directory will be much slower than a " - "similar operation on a local file system.".format(max_depth) + "similar operation on a local file system." ) to_return = self._walk_tree( diff --git a/parsons/shopify/shopify.py b/parsons/shopify/shopify.py index 4e63b5bf1a..5c4d3882c8 100644 --- a/parsons/shopify/shopify.py +++ b/parsons/shopify/shopify.py @@ -6,7 +6,7 @@ from parsons.utilities.api_connector import APIConnector -class Shopify(object): +class Shopify: """ Instantiate the Shopify class `Args:` @@ -43,10 +43,7 @@ def __init__( self.password = check_env.check("SHOPIFY_PASSWORD", password, optional=True) self.api_key = check_env.check("SHOPIFY_API_KEY", api_key, optional=True) self.api_version = check_env.check("SHOPIFY_API_VERSION", api_version) - self.base_url = "https://%s.myshopify.com/admin/api/%s/" % ( - self.subdomain, - self.api_version, - ) + self.base_url = f"https://{self.subdomain}.myshopify.com/admin/api/{self.api_version}/" if self.access_token is None and (self.password is None or self.api_key is None): raise KeyError("Must set either access_token or both api_key and password.") if self.access_token is not None: @@ -163,14 +160,12 @@ def get_query_url(self, query_date=None, since_id=None, table_name=None, count=T # Specific date if provided query_date = datetime.strptime(query_date, "%Y-%m-%d") max_date = query_date + timedelta(days=1) - filters += "&created_at_min={}&created_at_max={}".format( - query_date.isoformat(), max_date.isoformat() - ) + filters += 
f"&created_at_min={query_date.isoformat()}&created_at_max={max_date.isoformat()}" elif since_id: # Since ID if provided - filters += "&since_id=%s" % since_id + filters += f"&since_id={since_id}" - return self.base_url + "%s?%s" % (table, filters) + return self.base_url + f"{table}?{filters}" def graphql(self, query): """ diff --git a/parsons/sisense/sisense.py b/parsons/sisense/sisense.py index dd2fd8298c..d9fbb38300 100644 --- a/parsons/sisense/sisense.py +++ b/parsons/sisense/sisense.py @@ -9,7 +9,7 @@ URI = "https://app.periscopedata.com/api/v1/" -class Sisense(object): +class Sisense: """ Instantiate the Sisense class. diff --git a/parsons/targetsmart/targetsmart_api.py b/parsons/targetsmart/targetsmart_api.py index ef46588bc3..1c802da452 100644 --- a/parsons/targetsmart/targetsmart_api.py +++ b/parsons/targetsmart/targetsmart_api.py @@ -58,7 +58,7 @@ def data_enhance(self, search_id, search_id_type="voterbase", state=None): """ if search_id_type in ["smartvan", "votebuilder", "voter"] and state is None: - raise KeyError("Search ID type '{}' requires state kwarg".format(search_id_type)) + raise KeyError(f"Search ID type '{search_id_type}' requires state kwarg") if search_id_type not in ( "voterbase", @@ -308,7 +308,7 @@ def district( return Table([self.connection.request(url, args=args, raw=True)["match_data"]]) -class Voter(object): +class Voter: def __init__(self, connection): self.connection = connection diff --git a/parsons/targetsmart/targetsmart_automation.py b/parsons/targetsmart/targetsmart_automation.py index df6e29095a..8950e7aba7 100644 --- a/parsons/targetsmart/targetsmart_automation.py +++ b/parsons/targetsmart/targetsmart_automation.py @@ -42,7 +42,7 @@ # Automation matching documentation can be found here: # https://docs.targetsmart.com/my_tsmart/automation/developer.html. 
-class TargetSmartAutomation(object): +class TargetSmartAutomation: """ * `Automation overview `_ * `Automation integration doc `_ diff --git a/parsons/tools/credential_tools.py b/parsons/tools/credential_tools.py index d38e716542..d62ff33a34 100644 --- a/parsons/tools/credential_tools.py +++ b/parsons/tools/credential_tools.py @@ -74,7 +74,7 @@ def encode_from_json_file(credential_file): str The encoded credential. """ - with open(credential_file, "r") as f: + with open(credential_file) as f: data = json.load(f) json_str = json.dumps(data) diff --git a/parsons/turbovote/turbovote.py b/parsons/turbovote/turbovote.py index 3bbec8446a..a9610651be 100644 --- a/parsons/turbovote/turbovote.py +++ b/parsons/turbovote/turbovote.py @@ -10,7 +10,7 @@ TURBOVOTE_URI = "https://turbovote-admin-http-api.prod.democracy.works/" -class TurboVote(object): +class TurboVote: """ Instantiate the TurboVote class diff --git a/parsons/utilities/api_connector.py b/parsons/utilities/api_connector.py index 54d48d989d..374953aeaf 100644 --- a/parsons/utilities/api_connector.py +++ b/parsons/utilities/api_connector.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -class APIConnector(object): +class APIConnector: """ The API Connector is a low level class for API requests that other connectors can utilize. It is understood that there are many standards for REST APIs and it will be diff --git a/parsons/utilities/dbt/dbt.py b/parsons/utilities/dbt/dbt.py index bac1597a9a..c9bb565746 100644 --- a/parsons/utilities/dbt/dbt.py +++ b/parsons/utilities/dbt/dbt.py @@ -2,7 +2,7 @@ import logging import pathlib -from typing import List, Optional, Type, Union +from typing import Optional, Union from dbt.cli.main import dbtRunner, dbtRunnerResult @@ -15,13 +15,13 @@ class dbtRunnerParsons: def __init__( self, - commands: Union[str, List[str]], + commands: Union[str, list[str]], dbt_project_directory: pathlib.Path, ) -> None: """Initialize dbtRunner with commands and a working directory. 
`Args:` - commands: Union[str, List[str]] + commands: Union[str, list[str]] A single dbt command string or a list of dbt command strings. e.g. ["seed", "build -s models/staging", "test"] @@ -67,15 +67,15 @@ def execute_dbt_command(self, command: str) -> Manifest: def run_dbt_commands( - commands: Union[str, List[str]], + commands: Union[str, list[str]], dbt_project_directory: pathlib.Path, - loggers: Optional[list[Union[dbtLogger, Type[dbtLogger]]]] = None, + loggers: Optional[list[Union[dbtLogger, type[dbtLogger]]]] = None, ) -> list[Manifest]: """Executes dbt commands within a directory, optionally logs results. Parameters: ----------- - commands : Union[str, List[str]] + commands : Union[str, list[str]] A single dbt command as a string or a list of dbt commands to be executed. diff --git a/parsons/utilities/dbt/logging.py b/parsons/utilities/dbt/logging.py index d05da8c2bf..8769f75a53 100644 --- a/parsons/utilities/dbt/logging.py +++ b/parsons/utilities/dbt/logging.py @@ -91,10 +91,10 @@ def format_command_result( ) if not log_summary_str: log_summary_str = "No models ran." 
- log_message += "\n*Summary*: `{}`".format(log_summary_str) + log_message += f"\n*Summary*: `{log_summary_str}`" - log_message += "\n*GB Processed*: {:.2f}".format(manifest.total_gb_processed) - log_message += "\n*Slot hours*: {:.2f}".format(manifest.total_slot_hours) + log_message += f"\n*GB Processed*: {manifest.total_gb_processed:.2f}" + log_message += f"\n*Slot hours*: {manifest.total_slot_hours:.2f}" # Errors if manifest.errors or manifest.fails: diff --git a/parsons/utilities/oauth_api_connector.py b/parsons/utilities/oauth_api_connector.py index 621d4dd059..5e41c2a21c 100644 --- a/parsons/utilities/oauth_api_connector.py +++ b/parsons/utilities/oauth_api_connector.py @@ -1,5 +1,5 @@ import urllib.parse -from typing import Dict, Optional +from typing import Optional from oauthlib.oauth2 import BackendApplicationClient from requests_oauthlib import OAuth2Session @@ -43,11 +43,11 @@ def __init__( client_secret: str, token_url: str, auto_refresh_url: Optional[str], - headers: Optional[Dict[str, str]] = None, + headers: Optional[dict[str, str]] = None, pagination_key: Optional[str] = None, data_key: Optional[str] = None, grant_type: str = "client_credentials", - authorization_kwargs: Optional[Dict[str, str]] = None, + authorization_kwargs: Optional[dict[str, str]] = None, ): super().__init__( uri, diff --git a/parsons/zoom/zoom.py b/parsons/zoom/zoom.py index 1390c3ba95..43abb9518e 100644 --- a/parsons/zoom/zoom.py +++ b/parsons/zoom/zoom.py @@ -1,7 +1,7 @@ import datetime import logging import uuid -from typing import Dict, Literal, Optional +from typing import Literal, Optional from oauthlib.oauth2.rfc6749.errors import InvalidClientError @@ -58,7 +58,7 @@ def _get_request( self, endpoint: str, data_key: Optional[str], - params: Optional[Dict[str, str]] = None, + params: Optional[dict[str, str]] = None, **kwargs, ) -> Table: """ @@ -229,7 +229,7 @@ def get_meetings( Parsons Table See :ref:`parsons-table` for output options. 
""" - params: Dict[str, str] = {"type": meeting_type} + params: dict[str, str] = {"type": meeting_type} if from_date: params["from"] = from_date.isoformat() if to_date: @@ -303,7 +303,7 @@ def get_user_webinars(self, user_id: str) -> Table: logger.info(f"Retrieved {tbl.num_rows} webinars.") return tbl - def get_past_webinar_report(self, webinar_id: str) -> Optional[Dict]: + def get_past_webinar_report(self, webinar_id: str) -> Optional[dict]: """ Get past meeting participants @@ -598,7 +598,7 @@ def _get_request( self, endpoint: str, data_key: Optional[str], - params: Optional[Dict[str, str]] = None, + params: Optional[dict[str, str]] = None, **kwargs, ) -> Table: """ diff --git a/pyproject.toml b/pyproject.toml index ca33cef29f..40d75a5a3e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,7 @@ select = [ "W", # pycodestyle warnings (W) "C4", # flake8-comprehensions (C4) "F", # Pyflakes (F) + "UP", # pyupgrade (UP) "I", # isort (I) "TID", # flake8-tidy-imports (TID) "ICN", # flake8-import-conventions (ICN) diff --git a/test/test_action_builder/test_action_builder.py b/test/test_action_builder/test_action_builder.py index 27fc330cdc..e8da1019ce 100644 --- a/test/test_action_builder/test_action_builder.py +++ b/test/test_action_builder/test_action_builder.py @@ -13,9 +13,7 @@ class TestActionBuilder(unittest.TestCase): def setUp(self, m): self.subdomain = "fake_subdomain" self.campaign = "fake-campaign" - self.api_url = "https://{}.actionbuilder.org/api/rest/v1/campaigns/{}".format( - self.subdomain, self.campaign - ) + self.api_url = f"https://{self.subdomain}.actionbuilder.org/api/rest/v1/campaigns/{self.campaign}" self.api_key = "fake_key" self.bldr = ActionBuilder( diff --git a/test/test_aws_async.py b/test/test_aws_async.py index 58293171cd..86e33a8f9d 100644 --- a/test/test_aws_async.py +++ b/test/test_aws_async.py @@ -21,7 +21,7 @@ def fake_table_process(table, **fakekwargs): tableargs = (table, fakekwargs) -class FakeRunner(object): +class 
FakeRunner: def __init__(self, init1=None): self.init1 = init1 diff --git a/test/test_azure/test_azure_blob_storage.py b/test/test_azure/test_azure_blob_storage.py index 8ccb803504..5f0861672e 100644 --- a/test/test_azure/test_azure_blob_storage.py +++ b/test/test_azure/test_azure_blob_storage.py @@ -119,7 +119,7 @@ def test_put_blob(self): def test_download_blob(self): # Download blob and ensure that it has the expected file contents download_blob_path = self.azure_blob.download_blob(TEST_CONTAINER_NAME, TEST_FILE_NAME) - with open(download_blob_path, "r") as f: + with open(download_blob_path) as f: self.assertEqual(f.read(), TEST_FILE_CONTENTS) def test_delete_blob(self): diff --git a/test/test_catalist/conftest.py b/test/test_catalist/conftest.py index 20d91e4d3a..72e3c0794b 100644 --- a/test/test_catalist/conftest.py +++ b/test/test_catalist/conftest.py @@ -1,5 +1,5 @@ import re -from typing import Generator +from collections.abc import Generator from unittest.mock import MagicMock import pytest diff --git a/test/test_copper/test_copper.py b/test/test_copper/test_copper.py index 3c7088fa21..0fda4ec9e5 100644 --- a/test/test_copper/test_copper.py +++ b/test/test_copper/test_copper.py @@ -226,7 +226,7 @@ def paginate_callback(self, request, context): row_start = page_number * page_size row_finish = row_start + page_size - with open(f"{_dir}/{context.headers['filename']}", "r") as json_file: + with open(f"{_dir}/{context.headers['filename']}") as json_file: response = json.load(json_file) if isinstance(response, list): @@ -512,7 +512,7 @@ def test_process_json(self): def test_process_custom_fields(self): # Using same json file and processed data in testing both process_ and get_ methods - with open(f"{_dir}/custom_fields_search.json", "r") as json_file: + with open(f"{_dir}/custom_fields_search.json") as json_file: fake_response = json.load(json_file) fake_processed = self.cp.process_custom_fields(fake_response) diff --git a/test/test_credential_tools.py 
b/test/test_credential_tools.py index 2c2e36a853..df374368f9 100644 --- a/test/test_credential_tools.py +++ b/test/test_credential_tools.py @@ -66,7 +66,7 @@ def test_decode_credential_save(self): self.assertTrue(os.path.isfile(file_path)) - with open(file_path, "r") as f: + with open(file_path) as f: cred = json.load(f) self.assertDictEqual(cred, expected) diff --git a/test/test_databases/test_bigquery.py b/test/test_databases/test_bigquery.py index 5344897e48..363cb225b1 100644 --- a/test/test_databases/test_bigquery.py +++ b/test/test_databases/test_bigquery.py @@ -417,8 +417,8 @@ def test_copy__credentials_are_correctly_set__from_filepath( actual = os.environ[bq.env_credential_path] - with open(actual, "r") as factual: - with open(self.cred_path, "r") as fexpected: + with open(actual) as factual: + with open(self.cred_path) as fexpected: actual_str = factual.read() self.assertEqual(actual_str, fexpected.read()) self.assertEqual(self.cred_contents, json.loads(actual_str)) @@ -445,8 +445,8 @@ def test_copy__credentials_are_correctly_set__from_env( actual = os.environ[bq.env_credential_path] - with open(actual, "r") as factual: - with open(self.cred_path, "r") as fexpected: + with open(actual) as factual: + with open(self.cred_path) as fexpected: actual_str = factual.read() self.assertEqual(actual_str, fexpected.read()) self.assertEqual(self.cred_contents, json.loads(actual_str)) @@ -471,8 +471,8 @@ def test_copy__credentials_are_correctly_set__from_dict( actual = os.environ[bq.env_credential_path] - with open(actual, "r") as factual: - with open(self.cred_path, "r") as fexpected: + with open(actual) as factual: + with open(self.cred_path) as fexpected: actual_str = factual.read() self.assertEqual(actual_str, fexpected.read()) self.assertEqual(self.cred_contents, json.loads(actual_str)) @@ -689,9 +689,7 @@ def test_logger_fail_on_dataset_does_not_exist(self, capture): # create and set up logger logger = logging.getLogger() logger.error( - "Dataset {0} does not 
exist and if_dataset_not_exists set to {1}".format( - self.destination_dataset, self.if_dataset_not_exists - ) + f"Dataset {self.destination_dataset} does not exist and if_dataset_not_exists set to {self.if_dataset_not_exists}" ) # call the method to generate log message @@ -711,9 +709,7 @@ def test_logger_fail_on_dataset_does_not_exist(self, capture): ( "root", "ERROR", - "Dataset {0} does not exist and if_dataset_not_exists set to {1}".format( - self.destination_dataset, self.if_dataset_not_exists - ), + f"Dataset {self.destination_dataset} does not exist and if_dataset_not_exists set to {self.if_dataset_not_exists}", ) ) @@ -724,9 +720,7 @@ def test_logger_fail_on_table_exists(self, capture): ## now test with table copy error logger.error( - "BigQuery copy failed, Table {0} exists and if_table_exists set to {1}".format( - self.destination_table, self.if_table_exists - ) + f"BigQuery copy failed, Table {self.destination_table} exists and if_table_exists set to {self.if_table_exists}" ) # call the method to generate log message @@ -746,8 +740,6 @@ def test_logger_fail_on_table_exists(self, capture): ( "root", "ERROR", - "BigQuery copy failed, Table {0} exists and if_table_exists set to {1}".format( - self.destination_table, self.if_table_exists - ), + f"BigQuery copy failed, Table {self.destination_table} exists and if_table_exists set to {self.if_table_exists}", ) ) diff --git a/test/test_databases/test_dbsync.py b/test/test_databases/test_dbsync.py index 5d694afe35..2d92037a79 100644 --- a/test/test_databases/test_dbsync.py +++ b/test/test_databases/test_dbsync.py @@ -2,7 +2,7 @@ import tempfile import unittest from abc import ABC -from typing import Optional, Type +from typing import Optional from parsons import DBSync, Postgres, Redshift, Table from parsons.databases.database_connector import DatabaseConnector @@ -19,15 +19,15 @@ class TestDBSync(ABC, unittest.TestCase): setup_sql: Optional[str] = None teardown_sql: Optional[str] = None temp_schema: 
Optional[str] = TEMP_SCHEMA - db: Type[DatabaseConnector] + db: type[DatabaseConnector] @classmethod def setUpClass(cls): # Skip tests on this abstract base class if cls is TestDBSync: - raise unittest.SkipTest("%s is an abstract base class" % cls.__name__) + raise unittest.SkipTest(f"{cls.__name__} is an abstract base class") else: - super(TestDBSync, cls).setUpClass() + super().setUpClass() def setUp(self): self.initialize_db_connections() diff --git a/test/test_etl.py b/test/test_etl.py index 7d8f3fb1e9..5c116ae12b 100644 --- a/test/test_etl.py +++ b/test/test_etl.py @@ -149,7 +149,7 @@ def test_to_html(self): "\n" "\n" ) - with open(html_file, "r") as f: + with open(html_file) as f: self.assertEqual(f.read(), html) def test_to_temp_html(self): @@ -173,7 +173,7 @@ def test_to_temp_html(self): "\n" "\n" ) - with open(path, "r") as f: + with open(path) as f: self.assertEqual(f.read(), html) def _assert_expected_csv(self, path, orig_tbl): @@ -203,7 +203,7 @@ def test_to_from_temp_csv_compressed(self): def test_from_csv_string(self): path = self.tbl.to_csv() # Pull the file into a string - with open(path, "r") as f: + with open(path) as f: csv_string = f.read() result_tbl = Table.from_csv_string(csv_string) diff --git a/test/test_github/test_github.py b/test/test_github/test_github.py index c2ec7f4998..84e07fb0e1 100644 --- a/test/test_github/test_github.py +++ b/test/test_github/test_github.py @@ -24,7 +24,7 @@ def test_wrap_github_404(self, m): @requests_mock.Mocker() def test_get_repo(self, m): - with open(os.path.join(_dir, "test_data", "test_get_repo.json"), "r") as f: + with open(os.path.join(_dir, "test_data", "test_get_repo.json")) as f: m.get(requests_mock.ANY, text=f.read()) repo = self.github.get_repo("octocat/Hello-World") self.assertEqual(repo["id"], 1296269) @@ -32,9 +32,9 @@ def test_get_repo(self, m): @requests_mock.Mocker() def test_list_repo_issues(self, m): - with open(os.path.join(_dir, "test_data", "test_get_repo.json"), "r") as f: + with 
open(os.path.join(_dir, "test_data", "test_get_repo.json")) as f: m.get("https://api.github.com:443/repos/octocat/Hello-World", text=f.read()) - with open(os.path.join(_dir, "test_data", "test_list_repo_issues.json"), "r") as f: + with open(os.path.join(_dir, "test_data", "test_list_repo_issues.json")) as f: m.get( "https://api.github.com:443/repos/octocat/Hello-World/issues", text=f.read(), @@ -47,16 +47,16 @@ def test_list_repo_issues(self, m): @requests_mock.Mocker() def test_download_file(self, m): - with open(os.path.join(_dir, "test_data", "test_get_repo.json"), "r") as f: + with open(os.path.join(_dir, "test_data", "test_get_repo.json")) as f: m.get("https://api.github.com:443/repos/octocat/Hello-World", text=f.read()) - with open(os.path.join(_dir, "test_data", "test_download_file.csv"), "r") as f: + with open(os.path.join(_dir, "test_data", "test_download_file.csv")) as f: m.get( "https://raw.githubusercontent.com/octocat/Hello-World/testing/data.csv", text=f.read(), ) file_path = self.github.download_file("octocat/Hello-World", "data.csv", branch="testing") - with open(file_path, "r") as f: + with open(file_path) as f: file_contents = f.read() self.assertEqual(file_contents, "header\ndata\n") diff --git a/test/test_gmail/test_gmail.py b/test/test_gmail/test_gmail.py index ed513d444f..54f425d5a3 100644 --- a/test/test_gmail/test_gmail.py +++ b/test/test_gmail/test_gmail.py @@ -236,7 +236,7 @@ def test_create_message_attachments(self): else: file = f"{_dir}/assets/loremipsum_b64_txt.txt" - with open(file, "r") as f: + with open(file) as f: b64_txt = f.read() self.assertEqual(parts[2].get_payload(), b64_txt) @@ -288,7 +288,7 @@ def test_create_message_attachments_jpeg(self): self.assertEqual(parts[0].get_payload(), message_text) self.assertEqual(parts[1].get_payload(), message_html) - with open(f"{_dir}/assets/loremipsum_b64_jpeg.txt", "r") as f: + with open(f"{_dir}/assets/loremipsum_b64_jpeg.txt") as f: b64_txt = f.read() 
self.assertEqual(parts[2].get_payload(), b64_txt) @@ -342,7 +342,7 @@ def test_create_message_attachments_m4a(self): self.assertEqual(parts[0].get_payload(), message_text) self.assertEqual(parts[1].get_payload(), message_html) - with open(f"{_dir}/assets/loremipsum_b64_m4a.txt", "r") as f: + with open(f"{_dir}/assets/loremipsum_b64_m4a.txt") as f: b64_txt = f.read() self.assertEqual(parts[2].get_payload(), b64_txt) @@ -394,7 +394,7 @@ def test_create_message_attachments_mp3(self): self.assertEqual(parts[0].get_payload(), message_text) self.assertEqual(parts[1].get_payload(), message_html) - with open(f"{_dir}/assets/loremipsum_b64_mp3.txt", "r") as f: + with open(f"{_dir}/assets/loremipsum_b64_mp3.txt") as f: b64_txt = f.read() self.assertEqual(parts[2].get_payload(), b64_txt) @@ -446,7 +446,7 @@ def test_create_message_attachments_mp4(self): self.assertEqual(parts[0].get_payload(), message_text) self.assertEqual(parts[1].get_payload(), message_html) - with open(f"{_dir}/assets/loremipsum_b64_mp4.txt", "r") as f: + with open(f"{_dir}/assets/loremipsum_b64_mp4.txt") as f: b64_txt = f.read() self.assertEqual(parts[2].get_payload(), b64_txt) @@ -499,7 +499,7 @@ def test_create_message_attachments_pdf(self): self.assertEqual(parts[0].get_payload(), message_text) self.assertEqual(parts[1].get_payload(), message_html) - with open(f"{_dir}/assets/loremipsum_b64_pdf.txt", "r") as f: + with open(f"{_dir}/assets/loremipsum_b64_pdf.txt") as f: b64_txt = f.read() self.assertEqual(parts[2].get_payload(), b64_txt) diff --git a/test/test_google/test_google_cloud_storage.py b/test/test_google/test_google_cloud_storage.py index 770937180a..a5c17ea687 100644 --- a/test/test_google/test_google_cloud_storage.py +++ b/test/test_google/test_google_cloud_storage.py @@ -83,7 +83,7 @@ def test_get_blob(self): def test_download_blob(self): # Download blob and ensure that it is the expected file path = self.cloud.download_blob(TEMP_BUCKET_NAME, TEMP_FILE_NAME) - with open(path, "r") as f: + 
with open(path) as f: self.assertEqual(f.read(), "A little string") def test_delete_blob(self): diff --git a/test/test_google/test_utilities.py b/test/test_google/test_utilities.py index c1e3c63a63..1e8808a4c6 100644 --- a/test/test_google/test_utilities.py +++ b/test/test_google/test_utilities.py @@ -40,7 +40,7 @@ def test_accepts_dictionary(self): util.setup_google_application_credentials(self.cred_contents, self.TEST_ENV_NAME) actual = os.environ[self.TEST_ENV_NAME] self.assertTrue(os.path.exists(actual)) - with open(actual, "r") as f: + with open(actual) as f: self.assertEqual(json.load(f), self.cred_contents) def test_accepts_string(self): @@ -48,14 +48,14 @@ def test_accepts_string(self): util.setup_google_application_credentials(cred_str, self.TEST_ENV_NAME) actual = os.environ[self.TEST_ENV_NAME] self.assertTrue(os.path.exists(actual)) - with open(actual, "r") as f: + with open(actual) as f: self.assertEqual(json.load(f), self.cred_contents) def test_accepts_file_path(self): util.setup_google_application_credentials(self.cred_path, self.TEST_ENV_NAME) actual = os.environ[self.TEST_ENV_NAME] self.assertTrue(os.path.exists(actual)) - with open(actual, "r") as f: + with open(actual) as f: self.assertEqual(json.load(f), self.cred_contents) def test_credentials_are_valid_after_double_call(self): @@ -67,8 +67,8 @@ def test_credentials_are_valid_after_double_call(self): util.setup_google_application_credentials(None, self.TEST_ENV_NAME) snd = os.environ[self.TEST_ENV_NAME] - with open(fst, "r") as ffst: - with open(snd, "r") as fsnd: + with open(fst) as ffst: + with open(snd) as fsnd: actual = fsnd.read() self.assertEqual(self.cred_contents, json.loads(actual)) self.assertEqual(ffst.read(), actual) diff --git a/test/test_rockthevote/test_rtv.py b/test/test_rockthevote/test_rtv.py index 5b178e8f67..8f1f212b72 100644 --- a/test/test_rockthevote/test_rtv.py +++ b/test/test_rockthevote/test_rtv.py @@ -75,7 +75,7 @@ def test_get_state_requirements(self, mocker): 
partner_id = "1" partner_api_key = "abcd" - with open(f"{_dir}/sample.json", "r") as j: + with open(f"{_dir}/sample.json") as j: expected_json = json.load(j) mocker.get( diff --git a/test/test_scytl/test_scytl.py b/test/test_scytl/test_scytl.py index ab1aa55dcf..b1d1c0724a 100644 --- a/test/test_scytl/test_scytl.py +++ b/test/test_scytl/test_scytl.py @@ -27,7 +27,7 @@ def tearDown(self) -> None: def test_get_summary_results_succeeds(self): result = self.scy.get_summary_results() - with open(f"{_DIR}/114729_summary_expected.csv", "r") as expected: + with open(f"{_DIR}/114729_summary_expected.csv") as expected: expectedResult = list(csv.DictReader(expected, delimiter=",")) for i, row in enumerate(result): @@ -56,7 +56,7 @@ def test_get_summary_results_skips_if_no_version_update(self): def test_get_detailed_results_succeeds(self): result = self.scy.get_detailed_results() - with open(f"{_DIR}/114729_county_expected.csv", "r") as expected: + with open(f"{_DIR}/114729_county_expected.csv") as expected: expectedResult = list(csv.DictReader(expected, delimiter=",")) for i in range(len(result)): @@ -85,7 +85,7 @@ def test_get_detailed_results_skips_if_no_version_update(self): def test_get_detailed_results_for_participating_counties_succeeds(self): _, result = self.scy.get_detailed_results_for_participating_counties() - with open(f"{_DIR}/114729_precinct_expected.csv", "r") as expected: + with open(f"{_DIR}/114729_precinct_expected.csv") as expected: expectedResult = list(csv.DictReader(expected, delimiter=",")) for i in range(len(result)): @@ -105,7 +105,7 @@ def test_get_detailed_results_for_participating_counties_succeeds_for_two_counti _, result = self.scy.get_detailed_results_for_participating_counties(county_names=counties) - with open(f"{_DIR}/114729_precinct_expected.csv", "r") as expected: + with open(f"{_DIR}/114729_precinct_expected.csv") as expected: expectedResult = csv.DictReader(expected, delimiter=",") filteredExpectedResults = list( @@ -192,7 +192,7 @@ def 
_mock_responses(self, m: requests_mock.Mocker): state=TEST_STATE, election_id=TEST_ELECTION_ID, version_num=TEST_VERSION_NUM ) - with open(f"{_DIR}/GA_114729_296262_county_election_settings.json", "r") as details_file: + with open(f"{_DIR}/GA_114729_296262_county_election_settings.json") as details_file: m.get(mock_election_settings_url, text=details_file.read()) for file in os.listdir(f"{_DIR}/mock_responses"): diff --git a/test/test_shopify.py b/test/test_shopify.py index 845ca7fe1e..9661e14f12 100644 --- a/test/test_shopify.py +++ b/test/test_shopify.py @@ -142,7 +142,7 @@ def test_get_query_url(self, m): @requests_mock.Mocker() def test_graphql(self, m): m.post( - "https://{0}.myshopify.com/admin/api/{1}/graphql.json".format(SUBDOMAIN, API_VERSION), + f"https://{SUBDOMAIN}.myshopify.com/admin/api/{API_VERSION}/graphql.json", json=self.mock_graphql, ) self.assertEqual( diff --git a/test/test_slack/test_slack.py b/test/test_slack/test_slack.py index 1d06736911..99d5e6f2e2 100644 --- a/test/test_slack/test_slack.py +++ b/test/test_slack/test_slack.py @@ -33,7 +33,7 @@ def test_slack_init(self): @requests_mock.Mocker() def test_channels(self, m): - with open(f"{responses_dir}/channels.json", "r") as f: + with open(f"{responses_dir}/channels.json") as f: slack_resp = json.load(f) m.post("https://slack.com/api/conversations.list", json=slack_resp) @@ -48,7 +48,7 @@ def test_channels(self, m): @requests_mock.Mocker() def test_channels_all_fields(self, m): - with open(f"{responses_dir}/channels.json", "r") as f: + with open(f"{responses_dir}/channels.json") as f: slack_resp = json.load(f) m.post("https://slack.com/api/conversations.list", json=slack_resp) @@ -111,7 +111,7 @@ def test_channels_all_fields(self, m): @requests_mock.Mocker() def test_users(self, m): - with open(f"{responses_dir}/users.json", "r") as f: + with open(f"{responses_dir}/users.json") as f: slack_resp = json.load(f) m.post("https://slack.com/api/users.list", json=slack_resp) @@ -132,7 +132,7 @@ 
def test_users(self, m): @requests_mock.Mocker() def test_users_all_fields(self, m): - with open(f"{responses_dir}/users.json", "r") as f: + with open(f"{responses_dir}/users.json") as f: slack_resp = json.load(f) m.post("https://slack.com/api/users.list", json=slack_resp) @@ -230,7 +230,7 @@ def test_users_all_fields(self, m): @requests_mock.Mocker() def test_message_channel(self, m): - with open(f"{responses_dir}/message_channel.json", "r") as f: + with open(f"{responses_dir}/message_channel.json") as f: slack_resp = json.load(f) m.post("https://slack.com/api/chat.postMessage", json=slack_resp) @@ -266,7 +266,7 @@ def test_message(self, m): @requests_mock.Mocker() def test_file_upload(self, m): file_path = f"{responses_dir}/file_upload.json" - with open(file_path, "r") as f: + with open(file_path) as f: slack_resp = json.load(f) m.post("https://slack.com/api/files.upload", json=slack_resp) diff --git a/test/test_smtp.py b/test/test_smtp.py index f211b941c1..402daefcec 100644 --- a/test/test_smtp.py +++ b/test/test_smtp.py @@ -6,7 +6,7 @@ from parsons import SMTP -class FakeConnection(object): +class FakeConnection: def __init__(self, result_obj): self.result_obj = result_obj diff --git a/test/test_targetsmart/test_targetsmart_automation.py b/test/test_targetsmart/test_targetsmart_automation.py index 526efc08c4..80599e71d3 100644 --- a/test/test_targetsmart/test_targetsmart_automation.py +++ b/test/test_targetsmart/test_targetsmart_automation.py @@ -27,9 +27,9 @@ def test_create_job_xml(self): job_xml = self.ts.create_job_xml( "job_type", "match_job", ["test@gmail.com", "test2@gmail.com"] ) - with open(self.test_xml, "r") as xml: + with open(self.test_xml) as xml: test_xml = xml.read() - with open(job_xml, "r") as xml: + with open(job_xml) as xml: real_xml = xml.read() self.assertEqual(test_xml, real_xml) diff --git a/test/test_turbovote/test_turbovote.py b/test/test_turbovote/test_turbovote.py index 1f234ffdd9..3ba2f57b71 100644 --- 
a/test/test_turbovote/test_turbovote.py +++ b/test/test_turbovote/test_turbovote.py @@ -60,7 +60,7 @@ def test_get_users(self, m): "sms subscribed", ] - with open(f"{_dir}/users.txt", "r") as users_text: + with open(f"{_dir}/users.txt") as users_text: # Mock endpoints m.post(self.tv.uri + "login", json=fake_token) m.get( diff --git a/test/test_utilities.py b/test/test_utilities.py index 046bcb33bb..3c99ed97e0 100644 --- a/test/test_utilities.py +++ b/test/test_utilities.py @@ -68,7 +68,7 @@ def test_create_temp_directory(): # Verify the temp file no longer exists with pytest.raises(FileNotFoundError): - open(test_file1, "r") + open(test_file1) def test_close_temp_file(): @@ -77,7 +77,7 @@ def test_close_temp_file(): # Verify the temp file no longer exists with pytest.raises(FileNotFoundError): - open(temp, "r") + open(temp) def test_is_gzip_path(): diff --git a/test/test_van/test_events.py b/test/test_van/test_events.py index a9f22ca7bc..e44a41d529 100644 --- a/test/test_van/test_events.py +++ b/test/test_van/test_events.py @@ -102,7 +102,7 @@ def test_get_event(self, m): "description": "This is a sample", } - m.get(self.van.connection.uri + "events/{}".format(event_id), json=json) + m.get(self.van.connection.uri + f"events/{event_id}", json=json) self.assertEqual(json, self.van.get_event(event_id)) diff --git a/test/test_van/test_people.py b/test/test_van/test_people.py index 21642ba3c5..e07582b371 100644 --- a/test/test_van/test_people.py +++ b/test/test_van/test_people.py @@ -237,13 +237,13 @@ def test_create_relationship(self, m): # Bad request m.post( - self.van.connection.uri + "people/{}/relationships".format(bad_vanid_1), + self.van.connection.uri + f"people/{bad_vanid_1}/relationships", status_code=404, ) # Good request m.post( - self.van.connection.uri + "people/{}/relationships".format(good_vanid_1), + self.van.connection.uri + f"people/{good_vanid_1}/relationships", status_code=204, ) diff --git a/test/test_van/test_scores.py 
b/test/test_van/test_scores.py index 272d144e0d..bd9190ec4e 100644 --- a/test/test_van/test_scores.py +++ b/test/test_van/test_scores.py @@ -64,7 +64,7 @@ def test_get_score(self, m): "description": None, } - m.get(self.van.connection.uri + "scores/{}".format(score_id), json=json) + m.get(self.van.connection.uri + f"scores/{score_id}", json=json) self.assertEqual(json, self.van.get_score(score_id)) @requests_mock.Mocker() @@ -186,7 +186,7 @@ def test_update_score_status(self, m): score_update_id = 27892 m.patch( - self.van.connection.uri + "scoreUpdates/{}".format(score_update_id), + self.van.connection.uri + f"scoreUpdates/{score_update_id}", status_code=204, ) From cf90066a8f0ff3e029cca4bf8ce4a267a618a1f6 Mon Sep 17 00:00:00 2001 From: Wil T Date: Sat, 12 Jul 2025 11:49:51 -0400 Subject: [PATCH 2/2] ruff format --- parsons/databases/discover_database.py | 4 +--- parsons/databases/redshift/rs_copy_table.py | 5 +---- parsons/nation_builder/nation_builder.py | 2 +- parsons/shopify/shopify.py | 4 +++- test/test_action_builder/test_action_builder.py | 4 +++- 5 files changed, 9 insertions(+), 10 deletions(-) diff --git a/parsons/databases/discover_database.py b/parsons/databases/discover_database.py index 6fabe83151..44c1ccb434 100644 --- a/parsons/databases/discover_database.py +++ b/parsons/databases/discover_database.py @@ -63,9 +63,7 @@ def discover_database( for connector in default_connector: if connector.__name__ in detected: return connector() - raise OSError( - f"None of the default connectors {default_connector} were detected." 
- ) + raise OSError(f"None of the default connectors {default_connector} were detected.") elif default_connector.__name__ in detected: return default_connector() else: diff --git a/parsons/databases/redshift/rs_copy_table.py b/parsons/databases/redshift/rs_copy_table.py index c8cf7dbfdd..29e4da3ca5 100644 --- a/parsons/databases/redshift/rs_copy_table.py +++ b/parsons/databases/redshift/rs_copy_table.py @@ -146,10 +146,7 @@ def temp_s3_copy( ): if not self.s3_temp_bucket: raise KeyError( - - "Missing S3_TEMP_BUCKET, needed for transferring data to Redshift. " - "Must be specified as env vars or kwargs" - + "Missing S3_TEMP_BUCKET, needed for transferring data to Redshift. Must be specified as env vars or kwargs" ) # Coalesce S3 Key arguments diff --git a/parsons/nation_builder/nation_builder.py b/parsons/nation_builder/nation_builder.py index 47374ee1d1..eff7f0a5a1 100644 --- a/parsons/nation_builder/nation_builder.py +++ b/parsons/nation_builder/nation_builder.py @@ -112,7 +112,7 @@ def get_people(self) -> Table: logging.error(f"error requesting data from Nation Builder: {error}") wait_time = 30 - logging.info("waiting %s seconds before retrying" , wait_time) + logging.info("waiting %s seconds before retrying", wait_time) time.sleep(wait_time) return Table(data) diff --git a/parsons/shopify/shopify.py b/parsons/shopify/shopify.py index 5c4d3882c8..0449056c47 100644 --- a/parsons/shopify/shopify.py +++ b/parsons/shopify/shopify.py @@ -160,7 +160,9 @@ def get_query_url(self, query_date=None, since_id=None, table_name=None, count=T # Specific date if provided query_date = datetime.strptime(query_date, "%Y-%m-%d") max_date = query_date + timedelta(days=1) - filters += f"&created_at_min={query_date.isoformat()}&created_at_max={max_date.isoformat()}" + filters += ( + f"&created_at_min={query_date.isoformat()}&created_at_max={max_date.isoformat()}" + ) elif since_id: # Since ID if provided filters += f"&since_id={since_id}" diff --git 
a/test/test_action_builder/test_action_builder.py b/test/test_action_builder/test_action_builder.py index e8da1019ce..10e1291ced 100644 --- a/test/test_action_builder/test_action_builder.py +++ b/test/test_action_builder/test_action_builder.py @@ -13,7 +13,9 @@ class TestActionBuilder(unittest.TestCase): def setUp(self, m): self.subdomain = "fake_subdomain" self.campaign = "fake-campaign" - self.api_url = f"https://{self.subdomain}.actionbuilder.org/api/rest/v1/campaigns/{self.campaign}" + self.api_url = ( + f"https://{self.subdomain}.actionbuilder.org/api/rest/v1/campaigns/{self.campaign}" + ) self.api_key = "fake_key" self.bldr = ActionBuilder(