diff --git a/.github/workflows/python-ci.yml b/.github/workflows/python-ci.yml index 558fce1a5..195f031d5 100644 --- a/.github/workflows/python-ci.yml +++ b/.github/workflows/python-ci.yml @@ -61,7 +61,7 @@ jobs: run: | cd python-package pip install -r requirements-dev.txt - pip install coveralls + pip install coveralls toml shell: bash - name: Install package run: | @@ -109,7 +109,7 @@ jobs: run: | cd python-package pip install -r requirements-dev.txt - pip install coveralls + pip install coveralls toml shell: cmd - name: Install package run: | diff --git a/bases/br_bd_diretorios_brasil/dataset_config.yaml b/bases/br_bd_diretorios_brasil/dataset_config.yaml index 6effaf7eb..72c84087a 100644 --- a/bases/br_bd_diretorios_brasil/dataset_config.yaml +++ b/bases/br_bd_diretorios_brasil/dataset_config.yaml @@ -45,4 +45,5 @@ github_url: # Não altere esse campo. # Data da última modificação dos metadados gerada automaticamente pelo CKAN. -metadata_modified: '2022-02-09T21:59:32.440801' + +metadata_modified: '2022-02-09T21:59:32.440801' \ No newline at end of file diff --git a/bases/test_dataset/README.md b/bases/test_dataset/README.md deleted file mode 100644 index 0583ac187..000000000 --- a/bases/test_dataset/README.md +++ /dev/null @@ -1,7 +0,0 @@ -Como capturar os dados de test_dataset? - -Para capturar esses dados, basta verificar o link dos dados originais indicado em dataset_config.yaml no item website. - -Caso tenha sido utilizado algum código de captura ou tratamento, estes estarão contidos em code/. Se o dado publicado for em sua versão bruta, não existirá a pasta code/. - -Os dados publicados estão disponíveis em: https://basedosdados.org/dataset/test-dataset \ No newline at end of file diff --git a/python-package/README.md b/python-package/README.md index 7f9264360..3821ddd17 100644 --- a/python-package/README.md +++ b/python-package/README.md @@ -37,3 +37,10 @@ Publique nova versão poetry version [patch|minor|major] poetry publish --build ``` + +Versão Alpha e Beta + +``` +version = "1.6.2-alpha.3" +version = "1.6.2-beta.3" +``` diff --git a/python-package/basedosdados/__init__.py b/python-package/basedosdados/__init__.py index 58e6a68ca..fd197e9ab 100644 --- a/python-package/basedosdados/__init__.py +++ b/python-package/basedosdados/__init__.py @@ -21,4 +21,5 @@ get_dataset_description, get_table_columns, get_table_size, -) + search +) \ No newline at end of file diff --git a/python-package/basedosdados/cli/cli.py b/python-package/basedosdados/cli/cli.py index 8f8b4542a..fe03cefb0 100644 --- a/python-package/basedosdados/cli/cli.py +++ b/python-package/basedosdados/cli/cli.py @@ -77,10 +77,25 @@ def mode_text(mode, verb, obj_id): default="raise", help="[raise|update|replace|pass] if dataset alread exists", ) +@click.option( + "--dataset_is_public", + default=True, + help="Control if prod dataset is public or not. By default staging datasets like `dataset_id_staging` are not public.", +) +@click.option( + "--location", + default=None, + help="Location of dataset data. 
List of possible region names locations: https://cloud.google.com/bigquery/docs/locations", +) @click.pass_context -def create_dataset(ctx, dataset_id, mode, if_exists): +def create_dataset(ctx, dataset_id, mode, if_exists, dataset_is_public, location): - Dataset(dataset_id=dataset_id, **ctx.obj).create(mode=mode, if_exists=if_exists) + Dataset(dataset_id=dataset_id, **ctx.obj).create( + mode=mode, + if_exists=if_exists, + dataset_is_public=dataset_is_public, + location=location, + ) click.echo( click.style( @@ -96,9 +111,9 @@ def create_dataset(ctx, dataset_id, mode, if_exists): "--mode", "-m", default="all", help="What datasets to create [prod|staging|all]" ) +@click.option( + "--location", + default=None, + help="Location of dataset data. List of possible region names locations: https://cloud.google.com/bigquery/docs/locations", +) @click.pass_context -def update_dataset(ctx, dataset_id, mode): +def update_dataset(ctx, dataset_id, mode, location): - Dataset(dataset_id=dataset_id, **ctx.obj).update(mode=mode) + Dataset(dataset_id=dataset_id, **ctx.obj).update(mode=mode, location=location) click.echo( click.style( @@ -110,10 +125,17 @@ @cli_dataset.command(name="publicize", help="Make a dataset public") @click.argument("dataset_id") +@click.option( + "--dataset_is_public", + default=True, + help="Control if prod dataset is public or not. By default staging datasets like `dataset_id_staging` are not public.", +) @click.pass_context -def publicize_dataset(ctx, dataset_id): +def publicize_dataset(ctx, dataset_id, dataset_is_public): - Dataset(dataset_id=dataset_id, **ctx.obj).publicize() + Dataset(dataset_id=dataset_id, **ctx.obj).publicize( + dataset_is_public=dataset_is_public + ) click.echo( click.style( @@ -168,7 +190,12 @@ def cli_table(): help="[raise|replace|pass] actions if table config files already exist", ) @click.option( - "--columns_config_url", + "--source_format", + default="csv", + help="Data source format. Only 'csv', 'avro' and 'parquet' are supported. Defaults to 'csv'.", +) +@click.option( + "--columns_config_url_or_path", default=None, help="google sheets URL. Must be in the format https://docs.google.com/spreadsheets/d//edit#gid=. The sheet must contain the column name: 'coluna' and column description: 'descricao'.", ) @click.pass_context def init_table( @@ -180,14 +207,16 @@ def init_table( ctx, dataset_id, table_id, data_sample_path, if_folder_exists, if_table_config_exists, - columns_config_url, + source_format, + columns_config_url_or_path, ): t = Table(table_id=table_id, dataset_id=dataset_id, **ctx.obj).init( data_sample_path=data_sample_path, if_folder_exists=if_folder_exists, if_table_config_exists=if_table_config_exists, - columns_config_url=columns_config_url, + source_format=source_format, + columns_config_url_or_path=columns_config_url_or_path, ) click.echo( @@ -232,9 +261,24 @@ def init_table( help="[raise|replace|pass] actions if table config files already exist", ) @click.option( - "--columns_config_url", + "--source_format", + default="csv", + help="Data source format. Only 'csv', 'avro' and 'parquet' are supported. Defaults to 'csv'.", +) +@click.option( + "--columns_config_url_or_path", + default=None, + help="Path to the local architecture file or a public google sheets URL. Path only supports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats. Google sheets URL must be in the format https://docs.google.com/spreadsheets/d//edit#gid=.", +) +@click.option( + "--dataset_is_public", + default=True, + help="Control if prod dataset is public or not. By default staging datasets like `dataset_id_staging` are not public.", +) +@click.option( + "--location", default=None, - help="google sheets URL. 
Must be in the format https://docs.google.com/spreadsheets/d//edit#gid=", + help="Location of dataset data. List of possible region names locations: https://cloud.google.com/bigquery/docs/locations", ) @click.pass_context def create_table( @@ -247,7 +291,10 @@ def create_table( force_dataset, if_storage_data_exists, if_table_config_exists, - columns_config_url, + source_format, + columns_config_url_or_path, + dataset_is_public, + location, ): Table(table_id=table_id, dataset_id=dataset_id, **ctx.obj).create( @@ -257,7 +304,10 @@ def create_table( force_dataset=force_dataset, if_storage_data_exists=if_storage_data_exists, if_table_config_exists=if_table_config_exists, - columns_config_url=columns_config_url, + source_format=source_format, + columns_config_url_or_path=columns_config_url_or_path, + dataset_is_public=dataset_is_public, + location=location, ) click.echo( @@ -297,23 +347,32 @@ def update_table(ctx, dataset_id, table_id, mode): @click.argument("dataset_id") @click.argument("table_id") @click.option( - "--columns_config_url", + "--columns_config_url_or_path", default=None, - help="""\nGoogle sheets URL. Must be in the format https://docs.google.com/spreadsheets/d//edit#gid=. -\nThe sheet must contain the columns:\n - - nome: column name\n - - descricao: column description\n - - tipo: column bigquery type\n - - unidade_medida: column mesurement unit\n - - dicionario: column related dictionary\n - - nome_diretorio: column related directory in the format .: + help="""\nFills columns in table_config.yaml automatically using a public google sheets URL or a local file. Also regenerate + \npublish.sql and autofill type using bigquery_type.\n + + \nThe sheet must contain the columns:\n + - name: column name\n + - description: column description\n + - bigquery_type: column bigquery type\n + - measurement_unit: column mesurement unit\n + - covered_by_dictionary: column related dictionary\n + - directory_column: column related directory in the format .:\n + - temporal_coverage: column temporal coverage\n + - has_sensitive_data: the column has sensitive data\n + - observations: column observations\n + \nArgs:\n + \ncolumns_config_url_or_path (str): Path to the local architeture file or a public google sheets URL.\n + Path only suports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.\n + Google sheets URL must be in the format https://docs.google.com/spreadsheets/d//edit#gid=.\n """, ) @click.pass_context -def update_columns(ctx, dataset_id, table_id, columns_config_url): +def update_columns(ctx, dataset_id, table_id, columns_config_url_or_path): Table(table_id=table_id, dataset_id=dataset_id, **ctx.obj).update_columns( - columns_config_url=columns_config_url, + columns_config_url_or_path=columns_config_url_or_path, ) click.echo( diff --git a/python-package/basedosdados/configs/templates/table/table_description.txt b/python-package/basedosdados/configs/templates/table/table_description.txt index 164f5849c..c52f26948 100644 --- a/python-package/basedosdados/configs/templates/table/table_description.txt +++ b/python-package/basedosdados/configs/templates/table/table_description.txt @@ -48,8 +48,8 @@ Email: {{ data_cleaned_by.email }} {% call input(partitions) -%} Partições (Filtre a tabela por essas colunas para economizar dinheiro e tempo) --------- -{% if (partitions.split(',') is not none) -%} -{% for partition in partitions.split(',') -%} +{% if (partitions is not none) -%} +{% for partition in partitions -%} - {{ partition }} {% endfor -%} {%- endif %} diff --git 
a/python-package/basedosdados/constants.py b/python-package/basedosdados/constants.py index 6ebfb80a5..f5492d76b 100644 --- a/python-package/basedosdados/constants.py +++ b/python-package/basedosdados/constants.py @@ -1,6 +1,14 @@ -__all__ = ["constants"] +__all__ = ["config", "constants"] from enum import Enum +from dataclasses import dataclass + + +@dataclass +class config: + verbose: bool = True + billing_project_id: str = None + project_config_path: str = None class constants(Enum): diff --git a/python-package/basedosdados/download/download.py b/python-package/basedosdados/download/download.py index f6124ad1d..dbf1d7e83 100644 --- a/python-package/basedosdados/download/download.py +++ b/python-package/basedosdados/download/download.py @@ -18,6 +18,7 @@ BaseDosDadosInvalidProjectIDException, BaseDosDadosNoBillingProjectIDException, ) +from basedosdados.constants import config, constants from pandas_gbq.gbq import GenericGBQException @@ -49,6 +50,10 @@ def read_sql( Query result """ + # standard billing_project_id configuration + if billing_project_id is None: + billing_project_id = config.billing_project_id + try: # Set a two hours timeout bigquery_storage_v1.client.BigQueryReadClient.read_rows = partialmethod( @@ -86,8 +91,8 @@ def read_table( dataset_id, table_id, - query_project_id="basedosdados", billing_project_id=None, + query_project_id="basedosdados", limit=None, from_file=False, reauth=False, @@ -101,10 +106,10 @@ table_id (str): Optional. Table id available in basedosdados.dataset_id. It should always come with dataset_id. - query_project_id (str): Optional. - Which project the table lives. You can change this you want to query different projects. billing_project_id (str): Optional. Project that will be billed. Find your Project ID here https://console.cloud.google.com/projectselector2/home/dashboard + query_project_id (str): Optional. + Which project the table lives. You can change this if you want to query different projects. limit (int): Optional. Number of rows to read from table. from_file (boolean): Optional. @@ -122,6 +127,10 @@ Query result """ + # standard billing_project_id configuration + if billing_project_id is None: + billing_project_id = config.billing_project_id + if (dataset_id is not None) and (table_id is not None): query = f""" SELECT * @@ -147,8 +156,8 @@ def download( query=None, dataset_id=None, table_id=None, - query_project_id="basedosdados", billing_project_id=None, + query_project_id="basedosdados", limit=None, from_file=False, reauth=False, @@ -180,10 +189,10 @@ table_id (str): Optional. Table id available in basedosdados.dataset_id. It should always come with dataset_id. - query_project_id (str): Optional. - Which project the table lives. You can change this you want to query different projects. billing_project_id (str): Optional. Project that will be billed. Find your Project ID here https://console.cloud.google.com/projectselector2/home/dashboard + query_project_id (str): Optional. + Which project the table lives. You can change this if you want to query different projects. limit (int): Optional Number of rows. from_file (boolean): Optional. @@ -201,6 +210,10 @@ "Either table_id, dataset_id or query should be filled." 
) + # standard billing_project_id configuration + if billing_project_id is None: + billing_project_id = config.billing_project_id + client = google_client(query_project_id, billing_project_id, from_file, reauth) # makes sure that savepath is a filepath and not a folder diff --git a/python-package/basedosdados/download/metadata.py b/python-package/basedosdados/download/metadata.py index fe5d5dff9..9aae894ba 100644 --- a/python-package/basedosdados/download/metadata.py +++ b/python-package/basedosdados/download/metadata.py @@ -1,24 +1,25 @@ from google.cloud import bigquery import pandas as pd +import requests from basedosdados.download.base import credentials - -def _get_header(text): - """Gets first paragraph of a text - - Args: - text (str or None): Text to be split - - Returns: - str: First paragraph +def _safe_fetch(url: str): """ - - if isinstance(text, str): - return text.split("\n")[0] - elif text is None: - return "" - + Safely fetches urls and, if something goes wrong, informs the user of the possible cause + """ + try: + response = requests.get(url) + response.raise_for_status() + return response + except requests.exceptions.HTTPError as errh: + print("Http Error:", errh) + except requests.exceptions.ConnectionError as errc: + print("Error Connecting:", errc) + except requests.exceptions.Timeout as errt: + print("Timeout Error:", errt) + except requests.exceptions.RequestException as err: + print("This url doesn't appear to exist:", err) def _fix_size(s, step=80): @@ -32,7 +33,6 @@ def _print_output(df): """Prints dataframe contents as print blocks - Args: df (pd.DataFrame): table to be printed """ @@ -46,14 +46,6 @@ print("-" * (step + 15)) print() - # func = lambda lista, final, step: ( - # func(lista[1:], - # (final + lista[0] + ' ') - # if len(final.split('\n')[-1]) <= step - # else final + '\n', - # step - # ) if len(lista) else final) - def _handle_output(verbose, output_type, df, col_name=None): """Handles datasets and tables listing outputs based on user's choice. @@ -91,234 +83,226 @@ return None - -def list_datasets( - query_project_id="basedosdados", - filter_by=None, - with_description=False, - from_file=False, - verbose=True, -): - """Fetch the dataset_id of datasets available at query_project_id. Prints information on - screen or returns it as a list. +def list_datasets(query, limit=10, with_description=False, verbose=True): + """ + This function uses `bd_dataset_search` website API + endpoint to retrieve a list of available datasets. Args: - query_project_id (str): Optional. - Which project the table lives. You can change this you want to query different projects. - filter_by (str): Optional - String to be matched in dataset_id. + query (str): + String to search in datasets' metadata. + limit (int): + Maximum number of results to return with_description (bool): Optional If True, fetch short dataset description for each dataset. verbose (bool): Optional. If set to True, information is printed to the screen. If set to False, a list object is returned. 
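Reviewer note before the metadata.py hunks continue: the `config` dataclass added in constants.py above gives the package a process-wide default for billing and verbosity, which `read_sql`, `read_table` and `download` fall back to when no `billing_project_id` is passed. A minimal usage sketch; the project id is a placeholder and the dataset/table ids are only illustrative:

```python
import basedosdados as bd
from basedosdados.constants import config

# one-time, process-wide defaults; the download functions fall back to
# config.billing_project_id when no billing_project_id is passed in
config.billing_project_id = "my-billing-project"  # placeholder project id
config.verbose = False  # the loguru handler drops to ERROR-only in the upload classes

df = bd.read_table(
    dataset_id="br_ibge_populacao",  # illustrative dataset/table ids
    table_id="municipio",
    limit=10,
)
```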
- - Example: - list_datasets( - filter_by='sp', - with_description=True, - ) + Returns: + list | stdout """ - client = bigquery.Client( - credentials=credentials(from_file=from_file), project=query_project_id - ) - - datasets_list = list(client.list_datasets()) - - datasets = pd.DataFrame( - [dataset.dataset_id for dataset in datasets_list], columns=["dataset_id"] - ) - - if filter_by: - - datasets = datasets.loc[datasets["dataset_id"].str.contains(filter_by)] - - if with_description: - - datasets["description"] = [ - _get_header(client.get_dataset(dataset).description) - for dataset in datasets["dataset_id"] + url = f"https://basedosdados.org/api/3/action/bd_dataset_search?q={query}&page_size={limit}&resource_type=bdm_table" + + response = _safe_fetch(url) + + json_response = response.json() + + # this dict has all information we need to output the function + dataset_dict = { + "dataset_id": [ + dataset["name"] for dataset in json_response["result"]["datasets"] + ], + "description": [ + dataset["notes"] if "notes" in dataset.keys() else None + for dataset in json_response["result"]["datasets"] + ], + } + + # select desired output using dataset_id info. Note that the output is either a standardized string or a list + if verbose & (with_description == False): + return _print_output(pd.DataFrame.from_dict(dataset_dict)[["dataset_id"]]) + elif verbose & with_description: + return _print_output( + pd.DataFrame.from_dict(dataset_dict)[["dataset_id", "description"]] + ) + elif (verbose == False) & (with_description == False): + return dataset_dict["dataset_id"] + elif (verbose == False) & with_description: + return [ + { + "dataset_id": dataset_dict["dataset_id"][k], + "description": dataset_dict["description"][k], + } + for k in range(len(dataset_dict["dataset_id"])) ] - return _handle_output( - verbose=verbose, - output_type="list", - df=datasets, - col_name="dataset_id", - ) - def list_dataset_tables( dataset_id, - query_project_id="basedosdados", - from_file=False, - filter_by=None, with_description=False, verbose=True, ): - """Fetch table_id for tables available at the specified dataset_id. Prints the information - on screen or returns it as a list. + """ + Fetch table_id for tables available at the specified dataset_id. Prints the information on screen or returns it as a list. Args: dataset_id (str): Optional. - Dataset id available in basedosdados. - query_project_id (str): Optional. - Which project the table lives. You can change this you want to query different projects. - filter_by (str): Optional - String to be matched in the table_id. + Dataset id returned by the list_datasets function with_description (bool): Optional If True, fetch short table descriptions for each table that match the search criteria. verbose (bool): Optional. If set to True, information is printed to the screen. If set to False, a list object is returned. 
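For reference while reviewing this hunk: the rewritten `list_datasets` reduces to a single GET against the site's search endpoint. A standalone sketch using the same URL shape and the `result.datasets[].name` / `.notes` fields read by the code above:

```python
import requests

def search_datasets(query: str, limit: int = 10) -> list:
    """Return dataset ids and descriptions from the bd_dataset_search endpoint."""
    url = (
        "https://basedosdados.org/api/3/action/bd_dataset_search"
        f"?q={query}&page_size={limit}&resource_type=bdm_table"
    )
    response = requests.get(url)
    response.raise_for_status()  # the package's _safe_fetch wraps this step
    datasets = response.json()["result"]["datasets"]
    return [
        {"dataset_id": ds["name"], "description": ds.get("notes")}
        for ds in datasets
    ]
```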
- Example: - list_dataset_tables( - dataset_id='br_ibge_censo2010' - filter_by='renda', - with_description=True, - ) + Returns: + stdout | list """ - client = bigquery.Client( - credentials=credentials(from_file=from_file), project=query_project_id - ) - - dataset = client.get_dataset(dataset_id) - - tables_list = list(client.list_tables(dataset)) - tables = pd.DataFrame( - [table.table_id for table in tables_list], columns=["table_id"] - ) - - if filter_by: - - tables = tables.loc[tables["table_id"].str.contains(filter_by)] - - if with_description: - - tables["description"] = [ - _get_header(client.get_table(f"{dataset_id}.{table}").description) - for table in tables["table_id"] + dataset_id = dataset_id.replace("-","_") #The dataset_id pattern in the bd_dataset_search endpoint response uses a hyphen as a separator, while in the endpoint urls that specify the dataset_id parameter the separator used is an underscore. See issue #1079 + + url = f"https://basedosdados.org/api/3/action/bd_bdm_dataset_show?dataset_id={dataset_id}" + + response = _safe_fetch(url) + + json_response = response.json() + + dataset = json_response["result"] + # this dict has all information need to output the function + table_dict = { + "table_id": [ + dataset["resources"][k]["name"] for k in range(len(dataset["resources"])) + ], + "description": [ + dataset["resources"][k]["description"] + for k in range(len(dataset["resources"])) + ], + } + # select desired output using table_id info. Note that the output is either a standardized string or a list + if verbose & (with_description == False): + return _print_output(pd.DataFrame.from_dict(table_dict)[["table_id"]]) + elif verbose & with_description: + return _print_output( + pd.DataFrame.from_dict(table_dict)[["table_id", "description"]] + ) + elif (verbose == False) & (with_description == False): + return table_dict["table_id"] + elif (verbose == False) & with_description: + return [ + { + "table_id": table_dict["table_id"][k], + "description": table_dict["description"][k], + } + for k in range(len(table_dict["table_id"])) ] - return _handle_output( - verbose=verbose, - output_type="list", - df=tables, - col_name="table_id", - ) - def get_dataset_description( - dataset_id=None, - query_project_id="basedosdados", - from_file=False, + dataset_id, verbose=True, ): - """Prints the full dataset description. + """ + Prints the full dataset description. Args: - dataset_id (str): Optional. - Dataset id available in basedosdados. - query_project_id (str): Optional. - Which project the table lives. You can change this you want to query different projects. + dataset_id (str): Required. + Dataset id available in list_datasets. verbose (bool): Optional. If set to True, information is printed to the screen. If set to False, data is returned as a `str`. + + Returns: + stdout | str """ + url = f"https://basedosdados.org/api/3/action/bd_bdm_dataset_show?dataset_id={dataset_id}" - client = bigquery.Client( - credentials=credentials(from_file=from_file), project=query_project_id - ) + response = _safe_fetch(url) - dataset = client.get_dataset(dataset_id) + json_response = response.json() - return _handle_output(verbose=verbose, output_type="str", df=dataset) + description = json_response["result"]["notes"] + + if verbose: + print(description) + else: + return description def get_table_description( - dataset_id=None, - table_id=None, - query_project_id="basedosdados", - from_file=False, + dataset_id, + table_id, verbose=True, ): - """Prints the full table description. 
+ """ + Prints the full table description. Args: - dataset_id (str): Optional. - Dataset id available in basedosdados. It should always come with table_id. - table_id (str): Optional. - Table id available in basedosdados.dataset_id. - It should always come with dataset_id. - query_project_id (str): Optional. - Which project the table lives. You can change this you want to query different projects. + dataset_id (str): Required. + Dataset id available in list_datasets. + table_id (str): Required. + Table id available in list_dataset_tables verbose (bool): Optional. If set to True, information is printed to the screen. If set to False, data is returned as a `str`. + + Returns: + stdout | str """ - client = bigquery.Client( - credentials=credentials(from_file=from_file), project=query_project_id - ) + url = f"https://basedosdados.org/api/3/action/bd_bdm_table_show?dataset_id={dataset_id}&table_id={table_id}" + + response = _safe_fetch(url) - table = client.get_table(f"{dataset_id}.{table_id}") + json_response = response.json() - return _handle_output(verbose=verbose, output_type="str", df=table) + description = json_response["result"]["description"] + + if verbose: + print(description) + else: + return description def get_table_columns( - dataset_id=None, - table_id=None, - query_project_id="basedosdados", - from_file=False, + dataset_id, + table_id, verbose=True, ): - """Fetch the names, types and descriptions for the columns in the specified table. Prints - information on screen. - + """ + Fetch the names, types and descriptions for the columns in the specified table. Prints + information on screen. Args: - dataset_id (str): Optional. - Dataset id available in basedosdados. It should always come with table_id. - table_id (str): Optional. - Table id available in basedosdados.dataset_id. - It should always come with dataset_id. - query_project_id (str): Optional. - Which project the table lives. You can change this you want to query different projects. + dataset_id (str): Required. + Dataset id available in list_datasets. + table_id (str): Required. + Table id available in list_dataset_tables verbose (bool): Optional. If set to True, information is printed to the screen. If set to False, data is returned as a `list` of `dict`s. - Example: - get_table_columns( - dataset_id='br_ibge_censo2010', - table_id='pessoa_renda_setor_censitario' - ) + + Returns: + stdout | list """ - client = bigquery.Client( - credentials=credentials(from_file=from_file), project=query_project_id - ) + url = f"https://basedosdados.org/api/3/action/bd_bdm_table_show?dataset_id={dataset_id}&table_id={table_id}" - table_ref = client.get_table(f"{dataset_id}.{table_id}") + response = _safe_fetch(url) - columns = [ - (field.name, field.field_type, field.description) for field in table_ref.schema - ] + json_response = response.json() - description = pd.DataFrame(columns, columns=["name", "field_type", "description"]) + columns = json_response["result"]["columns"] - return _handle_output(verbose=verbose, output_type="records", df=description) + if verbose: + _print_output(pd.DataFrame(columns)) + else: + return columns def get_table_size( dataset_id, table_id, - billing_project_id, - query_project_id="basedosdados", - from_file=False, verbose=True, ): - """Use a query to get the number of rows and size (in Mb) of a table query - from BigQuery. Prints information on screen in markdown friendly format. + """Use a query to get the number of rows and size (in Mb) of a table. 
WARNING: this query may cost a lot depending on the table. @@ -328,41 +312,67 @@ table_id (str): Optional. Table id available in basedosdados.dataset_id. It should always come with dataset_id. - query_project_id (str): Optional. - Which project the table lives. You can change this you want to query different projects. - billing_project_id (str): Optional. - Project that will be billed. Find your Project ID here https://console.cloud.google.com/projectselector2/home/dashboard verbose (bool): Optional. If set to True, information is printed to the screen. If set to False, data is returned as a `list` of `dict`s. - Example: - get_table_size( - dataset_id='br_ibge_censo2010', - table_id='pessoa_renda_setor_censitario', - billing_project_id='yourprojectid' - ) """ - billing_client = bigquery.Client( - credentials=credentials(from_file=from_file), project=billing_project_id - ) + url = f"https://basedosdados.org/api/3/action/bd_bdm_table_show?dataset_id={dataset_id}&table_id={table_id}" - query = f"""SELECT COUNT(*) FROM {query_project_id}.{dataset_id}.{table_id}""" + response = _safe_fetch(url) - job = billing_client.query(query, location="US") + json_response = response.json() - num_rows = job.to_dataframe().loc[0, "f0_"] + size = json_response["result"]["size"] - size_mb = round(job.total_bytes_processed / 1024 / 1024, 2) + if size is None: + print("Size not available") + else: + if verbose: + _print_output(pd.DataFrame(size)) + else: + return size +def search(query, order_by): + """This function works as a wrapper to the `bd_dataset_search` website API + endpoint. - Args: + query (str): + String to search in datasets and tables' metadata. + order_by (str): score|popular|recent + Field by which the results will be ordered. + + Returns: + pd.DataFrame: + Response from the API presented as a pandas DataFrame. Each row is + a table. Each column is a field identifying the table. + """ + + # validate order_by input + if order_by not in ["score", "popular", "recent"]: + raise ValueError( + f'order_by must be score, popular or recent. 
Received "{order_by}"' + ) - return _handle_output(verbose=verbose, output_type="records", df=table_data) + url = f"https://basedosdados.org/api/3/action/bd_dataset_search?q={query}&order_by={order_by}&resource_type=bdm_table" + + response = _safe_fetch(url) + + json_response = response.json() + + dataset_dfs = [] + # first loop identify the number of the tables in each datasets + for dataset in json_response["result"]["datasets"]: + tables_dfs = [] + len(dataset["resources"]) + # second loop extracts tables' information for each dataset + for table in dataset["resources"]: + data_table = pd.DataFrame( + {k: str(table[k]) for k in list(table.keys())}, index=[0] + ) + tables_dfs.append(data_table) + # append tables' dataframes for each dataset + data_ds = tables_dfs[0].append(tables_dfs[1:]).reset_index(drop=True) + dataset_dfs.append(data_ds) + # append datasets' dataframes + df = dataset_dfs[0].append(dataset_dfs[1:]).reset_index(drop=True) + + return df \ No newline at end of file diff --git a/python-package/basedosdados/upload/base.py b/python-package/basedosdados/upload/base.py index 1ea48886f..cc121ce65 100644 --- a/python-package/basedosdados/upload/base.py +++ b/python-package/basedosdados/upload/base.py @@ -1,5 +1,6 @@ from google.cloud import bigquery, storage from google.oauth2 import service_account +from loguru import logger import yaml from jinja2 import Template from pathlib import Path @@ -7,9 +8,10 @@ import tomlkit import warnings import json +import sys import base64 from os import getenv -from basedosdados import constants +from basedosdados.constants import config, constants from functools import lru_cache @@ -26,9 +28,14 @@ def __init__( overwrite_cli_config=False, ): + # standard config_path configuration + if config_path is None: + config_path == config.config_path + self.config_path = Path.home() / config_path self._init_config(force=overwrite_cli_config) self.config = self._load_config() + self._config_log(config.verbose) self.templates = Path(templates or self.config["templates_path"]) self.metadata_path = Path(metadata_path or self.config["metadata_path"]) @@ -295,6 +302,15 @@ def _init_config(self, force): config_file.open("w", encoding="utf-8").write(tomlkit.dumps(c_file)) + def _config_log(self, verbose: bool): + logger.remove() # remove o default handler + logger_level = "INFO" if verbose else "ERROR" + logger.add( + sys.stderr, + level=logger_level, + format="{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} | {name}:{function}:{line} - {message}", + ) + def _load_config(self): if getenv(constants.ENV_CONFIG.value): diff --git a/python-package/basedosdados/upload/dataset.py b/python-package/basedosdados/upload/dataset.py index 25c514011..d6f87b0a4 100644 --- a/python-package/basedosdados/upload/dataset.py +++ b/python-package/basedosdados/upload/dataset.py @@ -1,5 +1,6 @@ from pathlib import Path from google.cloud import bigquery +from loguru import logger from google.api_core.exceptions import Conflict @@ -39,14 +40,18 @@ def _loop_modes(self, mode="all"): for m in mode ) - def _setup_dataset_object(self, dataset_id): + def _setup_dataset_object(self, dataset_id, location=None): dataset = bigquery.Dataset(dataset_id) + ## TODO: not being used since 1.6.0 - need to redo the description tha goes to bigquery + dataset.description = "Para saber mais acesse https://basedosdados.org/" # dataset.description = self._render_template( # Path("dataset/dataset_description.txt"), self.dataset_config # ) + dataset.location = location + return dataset def 
_write_readme_file(self): @@ -102,42 +107,53 @@ def init(self, replace=False): return self - def publicize(self, mode="all"): + def publicize(self, mode="all", dataset_is_public=True): """Changes IAM configuration to turn BigQuery dataset public. Args: mode (bool): Which dataset to create [prod|staging|all]. + dataset_is_public (bool): Control if prod dataset is public or not. By default staging datasets like `dataset_id_staging` are not public. """ for m in self._loop_modes(mode): dataset = m["client"].get_dataset(m["id"]) entries = dataset.access_entries - - entries.extend( - [ - bigquery.AccessEntry( - role="roles/bigquery.dataViewer", - entity_type="iamMember", - entity_id="allUsers", - ), - bigquery.AccessEntry( - role="roles/bigquery.metadataViewer", - entity_type="iamMember", - entity_id="allUsers", - ), - bigquery.AccessEntry( - role="roles/bigquery.user", - entity_type="iamMember", - entity_id="allUsers", - ), - ] - ) - dataset.access_entries = entries + # TODO https://github.com/basedosdados/mais/pull/1020 + if dataset_is_public and "staging" not in dataset.dataset_id: + entries.extend( + [ + bigquery.AccessEntry( + role="roles/bigquery.dataViewer", + entity_type="iamMember", + entity_id="allUsers", + ), + bigquery.AccessEntry( + role="roles/bigquery.metadataViewer", + entity_type="iamMember", + entity_id="allUsers", + ), + bigquery.AccessEntry( + role="roles/bigquery.user", + entity_type="iamMember", + entity_id="allUsers", + ), + ] + ) + dataset.access_entries = entries m["client"].update_dataset(dataset, ["access_entries"]) + logger.success( + " {object} {object_id}_{mode} was {action}!", + object_id=self.dataset_id, + mode=mode, + object="Dataset", + action="publicized", + ) - def create(self, mode="all", if_exists="raise"): + def create( + self, mode="all", if_exists="raise", dataset_is_public=True, location=None + ): """Creates BigQuery datasets given `dataset_id`. It can create two datasets: @@ -156,6 +172,12 @@ def create(self, mode="all", if_exists="raise"): * update : Update dataset description * pass : Do nothing + dataset_is_public (bool): Control if prod dataset is public or not. By default staging datasets like `dataset_id_staging` are not public. + + location (str): Optional. Location of dataset data. + List of possible region names locations: https://cloud.google.com/bigquery/docs/locations + + Raises: Warning: Dataset already exists and if_exists is set to `raise` """ @@ -171,13 +193,21 @@ def create(self, mode="all", if_exists="raise"): for m in self._loop_modes(mode): # Construct a full Dataset object to send to the API. - dataset_obj = self._setup_dataset_object(m["id"]) + dataset_obj = self._setup_dataset_object(m["id"], location=location) # Send the dataset to the API for creation, with an explicit timeout. # Raises google.api_core.exceptions.Conflict if the Dataset already # exists within the project. try: job = m["client"].create_dataset(dataset_obj) # Make an API request. + logger.success( + " {object} {object_id}_{mode} was {action}!", + object_id=self.dataset_id, + mode=mode, + object="Dataset", + action="created", + ) + except Conflict: if if_exists == "pass": @@ -186,7 +216,7 @@ def create(self, mode="all", if_exists="raise"): raise Conflict(f"Dataset {self.dataset_id} already exists") # Make prod dataset public - self.publicize() + self.publicize(dataset_is_public=dataset_is_public) def delete(self, mode="all"): """Deletes dataset in BigQuery. Toogle mode to choose which dataset to delete. 
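The `publicize` hunk above now gates the `allUsers` grants behind `dataset_is_public` and the `staging` name check. For review, the underlying IAM mechanics in isolation; client construction is elided and the roles are copied from the hunk:

```python
from google.cloud import bigquery

def make_dataset_public(client: bigquery.Client, dataset_id: str) -> None:
    """Grant the allUsers member read-style roles on a BigQuery dataset."""
    dataset = client.get_dataset(dataset_id)
    entries = list(dataset.access_entries)
    for role in (
        "roles/bigquery.dataViewer",
        "roles/bigquery.metadataViewer",
        "roles/bigquery.user",
    ):
        entries.append(
            bigquery.AccessEntry(
                role=role, entity_type="iamMember", entity_id="allUsers"
            )
        )
    dataset.access_entries = entries
    # send only the access_entries field in the update request
    client.update_dataset(dataset, ["access_entries"])
```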
@@ -198,12 +228,22 @@ for m in self._loop_modes(mode): m["client"].delete_dataset(m["id"], delete_contents=True, not_found_ok=True) + logger.info( + " {object} {object_id}_{mode} was {action}!", + object_id=self.dataset_id, + mode=mode, + object="Dataset", + action="deleted", + ) - def update(self, mode="all"): + def update(self, mode="all", location=None): """Update dataset description. Toggle mode to choose which dataset to update. Args: mode (str): Optional. Which dataset to update [prod|staging|all] + location (str): Optional. Location of dataset data. + List of possible region names locations: https://cloud.google.com/bigquery/docs/locations + """ for m in self._loop_modes(mode): @@ -212,5 +252,17 @@ # Raises google.api_core.exceptions.Conflict if the Dataset already # exists within the project. dataset = m["client"].update_dataset( - self._setup_dataset_object(m["id"]), fields=["description"] + self._setup_dataset_object( + m["id"], + location=location, + ), + fields=["description"], ) # Make an API request. + + logger.success( + " {object} {object_id}_{mode} was {action}!", + object_id=self.dataset_id, + mode=mode, + object="Dataset", + action="updated", + ) diff --git a/python-package/basedosdados/upload/datatypes.py b/python-package/basedosdados/upload/datatypes.py index 4dd9893f4..d6de62870 100644 --- a/python-package/basedosdados/upload/datatypes.py +++ b/python-package/basedosdados/upload/datatypes.py @@ -1,5 +1,7 @@ from google.cloud import bigquery import csv +import pandas as pd +import pandavro class Datatype: @@ -19,11 +21,19 @@ def __init__( ) def header(self, data_sample_path): if self.source_format == "csv": return next(csv.reader(open(data_sample_path, "r", encoding="utf-8"))) + + elif self.source_format == "avro": + dataframe = pandavro.read_avro(str(data_sample_path)) + return list(dataframe.columns.values) + + elif self.source_format == "parquet": + dataframe = pd.read_parquet(str(data_sample_path)) + return list(dataframe.columns.values) + else: raise NotImplementedError( - "Base dos Dados just supports comma separated csv files" + "Base dos Dados only supports comma separated csv, avro and parquet files" ) def partition(self): @@ -48,12 +58,18 @@ def external_config(self): _external_config.autodetect = False _external_config.schema = self.table_obj._load_schema(self.mode) - # You can add new formats here + elif self.source_format == "avro": + + _external_config = bigquery.ExternalConfig("AVRO") + + elif self.source_format == "parquet": + + _external_config = bigquery.ExternalConfig("PARQUET") else: raise NotImplementedError( - "Base dos Dados just supports comma separated csv files" + "Base dos Dados only supports comma separated csv, avro and parquet files" ) _external_config.source_uris = f"gs://{self.table_obj.bucket_name}/staging/{self.table_obj.dataset_id}/{self.table_obj.table_id}/*" diff --git a/python-package/basedosdados/upload/metadata.py b/python-package/basedosdados/upload/metadata.py index c32b114bc..ecd9e9a23 100644 --- a/python-package/basedosdados/upload/metadata.py +++ b/python-package/basedosdados/upload/metadata.py @@ -2,6 +2,7 @@ from copy import deepcopy from functools import lru_cache +from loguru import logger import requests import ruamel.yaml as ryaml @@ -121,7 +122,8 @@ def ckan_data_dict(self) -> dict: "private": ckan_dataset.get("private") or False, "owner_org": self.owner_org, "resources": ckan_dataset.get("resources", []) - or [{"resource_type": "external_link", "name": ""}], 
+ or [{"resource_type": "external_link", "name": ""}] + or [{"resource_type": "information_request", "name": ""}], "groups": [ {"name": group} for group in self.local_metadata.get("groups", []) or [] ], @@ -133,6 +135,9 @@ def ckan_data_dict(self) -> dict: { "key": "dataset_args", "value": { + "short_description": self.local_metadata.get( + "short_description" + ), "description": self.local_metadata.get("description"), "ckan_url": self.local_metadata.get("ckan_url"), "github_url": self.local_metadata.get("github_url"), @@ -154,30 +159,35 @@ def ckan_data_dict(self) -> dict: "spatial_coverage": self.local_metadata.get("spatial_coverage"), "temporal_coverage": self.local_metadata.get("temporal_coverage"), "update_frequency": self.local_metadata.get("update_frequency"), - "entity": self.local_metadata.get("entity"), - "time_unit": self.local_metadata.get("time_unit"), - "identifying_columns": self.local_metadata.get( - "identifying_columns" - ), + "observation_level": self.local_metadata.get("observation_level"), "last_updated": self.local_metadata.get("last_updated"), + "version": self.local_metadata.get("version"), "published_by": self.local_metadata.get("published_by"), "data_cleaned_by": self.local_metadata.get("data_cleaned_by"), "data_cleaning_description": self.local_metadata.get( "data_cleaning_description" ), + "data_cleaning_code_url": self.local_metadata.get( + "data_cleaning_code_url" + ), + "partner_organization": self.local_metadata.get( + "partner_organization" + ), "raw_files_url": self.local_metadata.get("raw_files_url"), "auxiliary_files_url": self.local_metadata.get( "auxiliary_files_url" ), "architecture_url": self.local_metadata.get("architecture_url"), - "covered_by_dictionary": self.local_metadata.get( - "covered_by_dictionary" - ), "source_bucket_name": self.local_metadata.get("source_bucket_name"), "project_id_prod": self.local_metadata.get("project_id_prod"), "project_id_staging": self.local_metadata.get("project_id_staging"), "partitions": self.local_metadata.get("partitions"), - "bdm_file_size": self.local_metadata.get("bdm_file_size"), + "uncompressed_file_size": self.local_metadata.get( + "uncompressed_file_size" + ), + "compressed_file_size": self.local_metadata.get( + "compressed_file_size" + ), "columns": self.local_metadata.get("columns"), "metadata_modified": self.local_metadata.get("metadata_modified"), "package_id": ckan_dataset.get("id"), @@ -324,11 +334,18 @@ def create( # if `dataset_config.yaml` doesn't exist but user wants to create # it alongside `table_config.yaml` dataset_config_exists = ( - self.metadata_path / "dataset_config.yaml" + self.metadata_path / self.dataset_id / "dataset_config.yaml" ).exists() if self.table_id and not table_only and not dataset_config_exists: self.dataset_metadata_obj.create(if_exists=if_exists) + logger.success( + " {object} {object_id} was {action}!", + object_id=self.table_id, + object="Metadata", + action="created", + ) + return self def validate(self) -> bool: @@ -354,6 +371,13 @@ def validate(self) -> bool: message = f"{self.filepath} has validation errors: {error}" raise BaseDosDadosException(message) + logger.success( + " {object} {object_id} was {action}!", + object_id=self.table_id, + object="Metadata", + action="validated", + ) + return True def publish( @@ -414,7 +438,7 @@ def publish( self.validate() assert self.is_updated(), ( - f"Could not publish metadata due to out of date config file. " + f"Could not publish metadata due to out-of-date config file. 
" f"Please run `basedosdados metadata create {self.dataset_id} " f"{self.table_id or ''}` to get the most recently updated met" f"adata and apply your changes to it." @@ -450,6 +474,14 @@ def publish( # recreate local metadata YAML file with the published data if published and update_locally: self.create(if_exists="replace") + self.dataset_metadata_obj.create(if_exists="replace") + + logger.success( + " {object} {object_id} was {action}!", + object_id=data_dict, + object="Metadata", + action="published", + ) return published @@ -684,15 +716,15 @@ def build_yaml_object( for remote_column in yaml["columns"]: if remote_column["name"] == local_column: remote_column["is_partition"] = True - yaml["partitions"] = ", ".join(partition_columns) + yaml["partitions"] = partition_columns # Nullify `partitions` field in case of other-than-None empty values if yaml.get("partitions") == "": yaml["partitions"] = None - # Add dataset_id and table_id - yaml["dataset_id"] = dataset_id if table_id: + # Add dataset_id and table_id + yaml["dataset_id"] = dataset_id yaml["table_id"] = table_id # Add gcloud config variables diff --git a/python-package/basedosdados/upload/storage.py b/python-package/basedosdados/upload/storage.py index 491726f7c..876a728f7 100644 --- a/python-package/basedosdados/upload/storage.py +++ b/python-package/basedosdados/upload/storage.py @@ -7,22 +7,12 @@ from basedosdados.upload.base import Base from basedosdados.exceptions import BaseDosDadosException +from loguru import logger from google.api_core import exceptions from google.api_core.retry import Retry # google retryble exceptions. References: https://googleapis.dev/python/storage/latest/retry_timeout.html#module-google.cloud.storage.retry -_MY_RETRIABLE_TYPES = [ - exceptions.TooManyRequests, # 429 - exceptions.InternalServerError, # 500 - exceptions.BadGateway, # 502 - exceptions.ServiceUnavailable, # 503 - exceptions.from_http_response, -] - - -def _is_retryable(exc): - return isinstance(exc, _MY_RETRIABLE_TYPES) class Storage(Base): @@ -228,6 +218,14 @@ def upload( "to 'replace' to overwrite data." ) + logger.success( + " {object} {filename}_{mode} was {action}!", + filename=filepath.name, + mode=mode, + object="File", + action="uploaded", + ) + def download( self, filename="*", @@ -311,6 +309,14 @@ def download( # download blob to savepath blob.download_to_filename(filename=f"{savepath}/{blob.name}") + logger.success( + " {object} {object_id}_{mode} was {action}!", + object_id=self.dataset_id, + mode=mode, + object="File", + action="downloaded", + ) + def delete_file(self, filename, mode, partitions=None, not_found_ok=False): """Deletes file from path `/////`. @@ -336,17 +342,24 @@ def delete_file(self, filename, mode, partitions=None, not_found_ok=False): if mode == "all" else [mode] ) - # define retry policy for google cloud storage exceptions for m in mode: blob = self.bucket.blob(self._build_blob_name(filename, m, partitions)) if blob.exists() or not blob.exists() and not not_found_ok: - blob.delete(retry=Retry(predicate=_is_retryable)) + blob.delete() else: return + logger.success( + " {object} {filename}_{mode} was {action}!", + filename=filename, + mode=mode, + object="File", + action="deleted", + ) + def delete_table(self, mode="staging", bucket_name=None, not_found_ok=False): """Deletes a table from storage, sends request in batches. 
@@ -395,11 +408,11 @@ def delete_table(self, mode="staging", bucket_name=None, not_found_ok=False): tqdm(table_blobs_chunks, desc="Delete Table Chunk") ): counter = 0 - while counter < 100: + while counter < 10: try: with self.client["storage_staging"].batch(): for blob in source_table: - blob.delete(retry=Retry(predicate=_is_retryable)) + blob.delete() break except Exception as e: print( @@ -408,6 +421,13 @@ def delete_table(self, mode="staging", bucket_name=None, not_found_ok=False): time.sleep(5) counter += 1 traceback.print_exc(file=sys.stderr) + logger.success( + " {object} {object_id}_{mode} was {action}!", + object_id=self.table_id, + mode=mode, + object="Table", + action="deleted", + ) def copy_table( self, @@ -461,14 +481,13 @@ def copy_table( tqdm(source_table_ref_chunks, desc="Copy Table Chunk") ): counter = 0 - while counter < 100: + while counter < 10: try: with self.client["storage_staging"].batch(): for blob in source_table: self.bucket.copy_blob( blob, destination_bucket=destination_bucket, - retry=Retry(predicate=_is_retryable), ) break except Exception as e: @@ -478,3 +497,10 @@ def copy_table( counter += 1 time.sleep(5) traceback.print_exc(file=sys.stderr) + logger.success( + " {object} {object_id}_{mode} was {action}!", + object_id=self.table_id, + mode=mode, + object="Table", + action="copied", + ) diff --git a/python-package/basedosdados/upload/table.py b/python-package/basedosdados/upload/table.py index 8d9bc1a22..6ab01ef94 100644 --- a/python-package/basedosdados/upload/table.py +++ b/python-package/basedosdados/upload/table.py @@ -1,5 +1,7 @@ +from grpc import Status from jinja2 import Template from pathlib import Path, PosixPath +from loguru import logger import json import csv from copy import deepcopy @@ -52,10 +54,7 @@ def _get_table_obj(self, mode): def _is_partitioned(self): ## check if the table are partitioned, need the split because of a change in the type of partitions in pydantic partitions = self.table_config["partitions"] - if partitions: - partitions = partitions.split(",") - - if partitions is None: + if not partitions: return False elif isinstance(partitions, list): @@ -221,8 +220,8 @@ def _make_template(self, columns, partition_columns, if_table_config_exists): self._make_publish_sql() - def _sheet_to_df(self, columns_config_url): - url = columns_config_url.replace("edit#gid=", "export?format=csv&gid=") + def _sheet_to_df(self, columns_config_url_or_path): + url = columns_config_url_or_path.replace("edit#gid=", "export?format=csv&gid=") try: return pd.read_csv(StringIO(requests.get(url).content.decode("utf-8"))) except: @@ -247,22 +246,26 @@ def table_exists(self, mode): else: return False - def update_columns(self, columns_config_url): + def update_columns(self, columns_config_url_or_path=None): """ - Fills columns in table_config.yaml automatically using a public google sheets URL. Also regenerate + Fills columns in table_config.yaml automatically using a public google sheets URL or a local file. Also regenerate publish.sql and autofill type using bigquery_type. - The URL must be in the format https://docs.google.com/spreadsheets/d//edit#gid=. 
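The renamed `_sheet_to_df` above keeps one trick worth spelling out for reviewers: a public Google Sheets `edit#gid=` URL can be rewritten into that tab's CSV export endpoint and read directly with pandas. The conversion as a standalone sketch:

```python
from io import StringIO

import pandas as pd
import requests

def sheet_to_df(sheet_url: str) -> pd.DataFrame:
    """Read a public Google Sheets tab by rewriting its edit URL to a CSV export."""
    export_url = sheet_url.replace("edit#gid=", "export?format=csv&gid=")
    response = requests.get(export_url)
    response.raise_for_status()
    return pd.read_csv(StringIO(response.content.decode("utf-8")))
```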
The sheet must contain the columns: - - nome: column name - - descricao: column description - - tipo: column bigquery type - - unidade_medida: column mesurement unit - - dicionario: column related dictionary - - nome_diretorio: column related directory in the format .: - + - name: column name + - description: column description + - bigquery_type: column bigquery type + - measurement_unit: column measurement unit + - covered_by_dictionary: column related dictionary + - directory_column: column related directory in the format .: + - temporal_coverage: column temporal coverage + - has_sensitive_data: the column has sensitive data + - observations: column observations Args: - columns_config_url (str): google sheets URL. + columns_config_url_or_path (str): Path to the local architecture file or a public google sheets URL. + Path only supports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats. + Google sheets URL must be in the format https://docs.google.com/spreadsheets/d//edit#gid=. + """ ruamel = ryaml.YAML() ruamel.preserve_quotes = True ruamel.indent(mapping=4, sequence=6, offset=4) @@ -270,66 +273,99 @@ table_config_yaml = ruamel.load( (self.table_folder / "table_config.yaml").open(encoding="utf-8") ) - if ( - "edit#gid=" not in columns_config_url - or "https://docs.google.com/spreadsheets/d/" not in columns_config_url - or not columns_config_url.split("=")[1].isdigit() - ): - raise BaseDosDadosException( - "The Google sheet url not in correct format." - "The url must be in the format https://docs.google.com/spreadsheets/d//edit#gid=" - ) - df = self._sheet_to_df(columns_config_url) - df = df.fillna("NULL") + if "https://docs.google.com/spreadsheets/d/" in columns_config_url_or_path: + if ( + "edit#gid=" not in columns_config_url_or_path + or "https://docs.google.com/spreadsheets/d/" + not in columns_config_url_or_path + or not columns_config_url_or_path.split("=")[1].isdigit() + ): + raise BaseDosDadosException( + "The Google sheet url is not in the correct format." + "The url must be in the format https://docs.google.com/spreadsheets/d//edit#gid=" + ) + df = self._sheet_to_df(columns_config_url_or_path) + else: + file_type = columns_config_url_or_path.split(".")[-1] + if file_type == "csv": + df = pd.read_csv(columns_config_url_or_path, encoding="utf-8") + elif file_type in ["xls", "xlsx", "xlsm", "xlsb", "odf", "ods", "odt"]: + df = pd.read_excel(columns_config_url_or_path) + else: + raise BaseDosDadosException( + "File not supported. Only csv, xls, xlsx, xlsm, xlsb, odf, ods, odt are supported." + ) - if "nome" not in df.columns.tolist(): + if "name" not in df.columns.tolist(): + raise BaseDosDadosException( + "Column 'name' not found in the google sheet. " + "The sheet must contain the column name: 'name'" + ) + elif "description" not in df.columns.tolist(): + raise BaseDosDadosException( + "Column 'description' not found in the google sheet. " + "The sheet must contain the column description: 'description'" + ) + elif "bigquery_type" not in df.columns.tolist(): + raise BaseDosDadosException( + "Column 'bigquery_type' not found in the google sheet. " + "The sheet must contain the column type: 'bigquery_type'" + ) + elif "measurement_unit" not in df.columns.tolist(): raise BaseDosDadosException( - "Column 'nome' not found in Google the google sheet. " - "The sheet must contain the column name: 'nome'" + "Column 'measurement_unit' not found in the google sheet. 
" + "The sheet must contain the column measurement unit: 'measurement_unit'" ) - elif "descricao" not in df.columns.tolist(): + elif "directory_column" not in df.columns.tolist(): raise BaseDosDadosException( - "Column 'descricao' not found in Google the google sheet. " - "The sheet must contain the column description: 'descricao'" + "Column 'directory_column' not found in Google the google sheet. " + "The sheet must contain the column dictionary: 'directory_column'" ) - elif "tipo" not in df.columns.tolist(): + elif "directory_column" not in df.columns.tolist(): raise BaseDosDadosException( - "Column 'tipo' not found in Google the google sheet. " - "The sheet must contain the column type: 'tipo'" + "Column 'directory_column' not found in Google the google sheet. " + "The sheet must contain the column dictionary name: 'directory_column'" ) - elif "unidade_medida" not in df.columns.tolist(): + elif "temporal_coverage" not in df.columns.tolist(): raise BaseDosDadosException( - "Column 'unidade_medida' not found in Google the google sheet. " - "The sheet must contain the column measurement unit: 'unidade_medida'" + "Column 'temporal_coverage' not found in Google the google sheet. " + "The sheet must contain the column dictionary name: 'temporal_coverage'" ) - elif "dicionario" not in df.columns.tolist(): + elif "has_sensitive_data" not in df.columns.tolist(): raise BaseDosDadosException( - "Column 'dicionario' not found in Google the google sheet. " - "The sheet must contain the column dictionary: 'dicionario'" + "Column 'temporal_coverage' not found in Google the google sheet. " + "The sheet must contain the column dictionary name: 'temporal_coverage'" ) - elif "nome_diretorio" not in df.columns.tolist(): + elif "observations" not in df.columns.tolist(): raise BaseDosDadosException( - "Column 'nome_diretorio' not found in Google the google sheet. " - "The sheet must contain the column dictionary name: 'nome_diretorio'" + "Column 'observations' not found in Google the google sheet. 
" + "The sheet must contain the column dictionary name: 'observations'" ) + df = df.fillna("NULL") columns_parameters = zip( - df["nome"].tolist(), - df["descricao"].tolist(), - df["tipo"].tolist(), - df["unidade_medida"].tolist(), - df["dicionario"].tolist(), - df["nome_diretorio"].tolist(), + df["name"].tolist(), + df["description"].tolist(), + df["bigquery_type"].tolist(), + df["measurement_unit"].tolist(), + df["covered_by_dictionary"].tolist(), + df["directory_column"].tolist(), + df["temporal_coverage"].tolist(), + df["has_sensitive_data"].tolist(), + df["observations"].tolist(), ) for ( name, description, - tipo, - unidade_medida, - dicionario, - nome_diretorio, + bigquery_type, + measurement_unit, + covered_by_dictionary, + directory_column, + temporal_coverage, + has_sensitive_data, + observations, ) in columns_parameters: for col in table_config_yaml["columns"]: if col["name"] == name: @@ -339,34 +375,52 @@ def update_columns(self, columns_config_url): ) col["bigquery_type"] = ( - col["bigquery_type"] if tipo == "NULL" else tipo.lower() + col["bigquery_type"] + if bigquery_type == "NULL" + else bigquery_type.lower() ) col["measurement_unit"] = ( col["measurement_unit"] - if unidade_medida == "NULL" - else unidade_medida + if measurement_unit == "NULL" + else measurement_unit ) col["covered_by_dictionary"] = ( - "no" if dicionario == "NULL" else "yes" + "no" + if covered_by_dictionary == "NULL" + else covered_by_dictionary + ) + + col["temporal_coverage"] = ( + col["temporal_coverage"] + if temporal_coverage == "NULL" + else [temporal_coverage] ) - dataset = nome_diretorio.split(".")[0] + col["observations"] = ( + col["observations"] if observations == "NULL" else observations + ) + + col["has_sensitive_data"] = ( + "no" if has_sensitive_data == "NULL" else has_sensitive_data + ) + + dataset = directory_column.split(".")[0] col["directory_column"]["dataset_id"] = ( col["directory_column"]["dataset_id"] if dataset == "NULL" else dataset ) - table = nome_diretorio.split(".")[-1].split(":")[0] + table = directory_column.split(".")[-1].split(":")[0] col["directory_column"]["table_id"] = ( col["directory_column"]["table_id"] if table == "NULL" else table ) - column = nome_diretorio.split(".")[-1].split(":")[-1] + column = directory_column.split(".")[-1].split(":")[-1] col["directory_column"]["column_name"] = ( col["directory_column"]["column_name"] if column == "NULL" @@ -403,7 +457,7 @@ def init( if_folder_exists="raise", if_table_config_exists="raise", source_format="csv", - columns_config_url=None, + columns_config_url_or_path=None, ): """Initialize table folder at metadata_path at `metadata_path//`. @@ -417,7 +471,8 @@ def init( Args: data_sample_path (str, pathlib.PosixPath): Optional. Data sample path to auto complete columns names - It supports Comma Delimited CSV. + It supports Comma Delimited CSV, Apache Avro and + Apache Parquet. if_folder_exists (str): Optional. What to do if table folder exists @@ -431,11 +486,12 @@ def init( * 'replace' : Replace files with blank template * 'pass' : Do nothing source_format (str): Optional - Data source format. Only 'csv' is supported. Defaults to 'csv'. + Data source format. Only 'csv', 'avro' and 'parquet' + are supported. Defaults to 'csv'. - columns_config_url (str): google sheets URL. - The URL must be in the format https://docs.google.com/spreadsheets/d//edit#gid=. 
+                    dataset = directory_column.split(".")[0]
                     col["directory_column"]["dataset_id"] = (
                         col["directory_column"]["dataset_id"]
                         if dataset == "NULL"
                         else dataset
                     )
-                    table = nome_diretorio.split(".")[-1].split(":")[0]
+                    table = directory_column.split(".")[-1].split(":")[0]
                     col["directory_column"]["table_id"] = (
                         col["directory_column"]["table_id"]
                         if table == "NULL"
                         else table
                     )
-                    column = nome_diretorio.split(".")[-1].split(":")[-1]
+                    column = directory_column.split(".")[-1].split(":")[-1]
                     col["directory_column"]["column_name"] = (
                         col["directory_column"]["column_name"]
                         if column == "NULL"
@@ -403,7 +457,7 @@ def init(
         if_folder_exists="raise",
         if_table_config_exists="raise",
         source_format="csv",
-        columns_config_url=None,
+        columns_config_url_or_path=None,
     ):
         """Initialize table folder at metadata_path at `metadata_path/<dataset_id>/<table_id>/`.
@@ -417,7 +471,8 @@ def init(
         Args:
             data_sample_path (str, pathlib.PosixPath): Optional.
                 Data sample path to auto complete columns names
-                It supports Comma Delimited CSV.
+                It supports Comma Delimited CSV, Apache Avro and
+                Apache Parquet.
             if_folder_exists (str): Optional.
                 What to do if table folder exists
@@ -431,11 +486,12 @@ def init(
                 * 'replace' : Replace files with blank template
                 * 'pass' : Do nothing
             source_format (str): Optional
-                Data source format. Only 'csv' is supported. Defaults to 'csv'.
+                Data source format. Only 'csv', 'avro' and 'parquet'
+                are supported. Defaults to 'csv'.

-            columns_config_url (str): google sheets URL.
-                The URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.
-                The sheet must contain the column name: "coluna" and column description: "descricao"
+            columns_config_url_or_path (str): Path to the local architecture file or a public google sheets URL.
+                Path only supports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.
+                Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.

         Raises:
             FileExistsError: If folder exists and replace is False.
@@ -479,7 +535,7 @@ def init(
             data_sample_path = [
                 f
                 for f in data_sample_path.glob("**/*")
-                if f.is_file() and f.suffix == ".csv"
+                if f.is_file() and f.suffix == f".{source_format}"
             ][0]

             partition_columns = [
@@ -528,8 +584,8 @@ def init(
         # Raise: without a path to data sample, should not replace config files with empty template
         self._make_template(columns, partition_columns, if_table_config_exists)

-        if columns_config_url is not None:
-            self.update_columns(columns_config_url)
+        if columns_config_url_or_path is not None:
+            self.update_columns(columns_config_url_or_path)

         return self

@@ -542,7 +598,9 @@ def create(
         if_storage_data_exists="raise",
         if_table_config_exists="raise",
         source_format="csv",
-        columns_config_url=None,
+        columns_config_url_or_path=None,
+        dataset_is_public=True,
+        location=None,
     ):
         """Creates BigQuery table at staging dataset.
@@ -558,6 +616,8 @@ def create(
         It currently supports the types:

         - Comma Delimited CSV
+        - Apache Avro
+        - Apache Parquet

         Data can also be partitioned following the hive partitioning scheme
         `<key1>=<value1>/<key2>=<value2>` - for instance,
@@ -588,11 +648,17 @@ def create(
                 * 'replace' : Replace table
                 * 'pass' : Do nothing
             source_format (str): Optional
-                Data source format. Only 'csv' is supported. Defaults to 'csv'.
+                Data source format. Only 'csv', 'avro' and 'parquet'
+                are supported. Defaults to 'csv'.
+
+            columns_config_url_or_path (str): Path to the local architecture file or a public google sheets URL.
+                Path only supports csv, xls, xlsx, xlsm, xlsb, odf, ods, odt formats.
+                Google sheets URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.
+
+            dataset_is_public (bool): Control if prod dataset is public or not. By default staging datasets like `dataset_id_staging` are not public.

-            columns_config_url (str): google sheets URL.
-                The URL must be in the format https://docs.google.com/spreadsheets/d/<table_key>/edit#gid=<table_gid>.
-                The sheet must contain the column name: "coluna" and column description: "descricao"
+            location (str): Optional. Location of dataset data.
+                List of possible region names: https://cloud.google.com/bigquery/docs/locations

         """
@@ -632,17 +698,19 @@ def create(
         except FileExistsError:
             pass

-        dataset_obj.create(if_exists="pass")
+        dataset_obj.create(
+            if_exists="pass", location=location, dataset_is_public=dataset_is_public
+        )

         self.init(
             data_sample_path=path,
             if_folder_exists="replace",
             if_table_config_exists=if_table_config_exists,
-            columns_config_url=columns_config_url,
+            columns_config_url_or_path=columns_config_url_or_path,
+            source_format=source_format,
         )

         table = bigquery.Table(self.table_full_name["staging"])
-
         table.external_data_configuration = Datatype(
             self, source_format, "staging", partitioned=self._is_partitioned()
         ).external_config
@@ -675,9 +743,17 @@ def create(

         self.client["bigquery_staging"].create_table(table)
+
+        logger.success(
+            "{object} {object_id} was {action}!",
+            object_id=self.table_id,
+            object="Table",
+            action="created",
+        )
+
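+        # NOTE (editor's illustrative sketch): with the parameters introduced
+        # above, a typical create call would look like the following; the
+        # dataset/table ids, file paths and region are hypothetical
+        # placeholders, and `path` is the data path argument passed through to
+        # `self.init(data_sample_path=path, ...)`:
+        #
+        #     from basedosdados import Table
+        #
+        #     tb = Table(dataset_id="my_dataset", table_id="my_table")
+        #     tb.create(
+        #         path="data/my_table.parquet",
+        #         source_format="parquet",
+        #         columns_config_url_or_path="architecture/my_table.xlsx",
+        #         dataset_is_public=True,
+        #         location="southamerica-east1",
+        #     )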
     def update(self, mode="all", not_found_ok=True):
         """Updates BigQuery schema and description.
-
         Args:
             mode (str): Optional.
                 Table of which table to update [prod|staging|all]
@@ -711,12 +787,18 @@ def update(self, mode="all", not_found_ok=True):
                     encoding="utf-8",
                 ).write(table.description)

-            if m == "prod":
+            if m == "staging":
                 table.schema = self._load_schema(m)

             self.client[f"bigquery_{m}"].update_table(
                 table, fields=["description", "schema"]
             )
+        logger.success(
+            "{object} {object_id} was {action}!",
+            object_id=self.table_id,
+            object="Table",
+            action="updated",
+        )

     def publish(self, if_exists="raise"):
         """Creates BigQuery table at production dataset.
@@ -750,7 +832,13 @@ def publish(self, if_exists="raise"):
             (self.table_folder / "publish.sql").open("r", encoding="utf-8").read()
         ).result()

-        self.update("prod")
+        self.update()
+        logger.success(
+            "{object} {object_id} was {action}!",
+            object_id=self.table_id,
+            object="Table",
+            action="published",
+        )

     def delete(self, mode):
         """Deletes table in BigQuery.
@@ -764,11 +852,26 @@ def delete(self, mode):
         if mode == "all":
             for m, n in self.table_full_name[mode].items():
                 self.client[f"bigquery_{m}"].delete_table(n, not_found_ok=True)
+            logger.info(
+                "{object} {object_id}_{mode} was {action}!",
+                object_id=self.table_id,
+                mode=mode,
+                object="Table",
+                action="deleted",
+            )
         else:
             self.client[f"bigquery_{mode}"].delete_table(
                 self.table_full_name[mode], not_found_ok=True
             )
+            logger.info(
+                "{object} {object_id}_{mode} was {action}!",
+                object_id=self.table_id,
+                mode=mode,
+                object="Table",
+                action="deleted",
+            )
+
     def append(self, filepath, partitions=None, if_exists="replace", **upload_args):
         """Appends new data to existing BigQuery table.
@@ -801,3 +904,9 @@ def append(self, filepath, partitions=None, if_exists="replace", **upload_args):
             if_exists=if_exists,
             **upload_args,
         )
+        logger.success(
+            "{object} {object_id} was {action}!",
+            object_id=self.table_id,
+            object="Table",
+            action="appended",
+        )
diff --git a/python-package/poetry.lock b/python-package/poetry.lock
index f6286315c..858b71a26 100644
--- a/python-package/poetry.lock
+++ b/python-package/poetry.lock
@@ -1,119 +1,81 @@
 [[package]]
-category = "main"
-description = "Async http client/server framework (asyncio)"
-marker = "python_version >= \"3.6\""
-name = "aiohttp"
-optional = false
-python-versions = ">=3.5.3"
-version = "3.6.2"
-
-[package.dependencies]
-async-timeout = ">=3.0,<4.0"
-attrs = ">=17.3.0"
-chardet = ">=2.0,<4.0"
-multidict = ">=4.5,<5.0"
-yarl = ">=1.0,<2.0"
-
-[package.dependencies.idna-ssl]
-python = "<3.7"
-version = ">=1.0"
-
-[package.dependencies.typing-extensions]
-python = "<3.7"
-version = ">=3.6.5"
-
-[package.extras]
-speedups = ["aiodns", "brotlipy", "cchardet"]
-
-[[package]]
-category = "dev"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
 name = "appdirs"
+version = "1.4.4"
+description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+category = "dev"
 optional = false
 python-versions = "*"
-version = "1.4.4"
 
 [[package]]
-category = "dev"
-description = "Disable App Nap on OS X 10.9"
-marker = "sys_platform == \"darwin\" or platform_system == \"Darwin\" or python_version >= \"3.3\" and sys_platform == \"darwin\""
 name = "appnope"
+version = "0.1.0"
+description = "Disable App Nap on OS X 10.9"
+category = "dev"
 optional = false
 python-versions = "*"
-version = "0.1.0"
 
 [[package]]
-category = "dev"
-description = "The secure Argon2 password hashing algorithm."
name = "argon2-cffi" +version = "20.1.0" +description = "The secure Argon2 password hashing algorithm." +category = "dev" optional = false python-versions = "*" -version = "20.1.0" [package.dependencies] cffi = ">=1.0.0" six = "*" [package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] docs = ["sphinx"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pytest"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] -category = "dev" -description = "Async generators and context managers for Python 3.5+" name = "async-generator" -optional = false -python-versions = ">=3.5" version = "1.10" - -[[package]] -category = "main" -description = "Timeout context manager for asyncio programs" -marker = "python_version >= \"3.6\"" -name = "async-timeout" +description = "Async generators and context managers for Python 3.5+" +category = "dev" optional = false -python-versions = ">=3.5.3" -version = "3.0.1" +python-versions = ">=3.5" [[package]] -category = "dev" -description = "Atomic file writes." -marker = "sys_platform == \"win32\"" name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.0" [[package]] -category = "main" -description = "Classes Without Boilerplate" name = "attrs" +version = "20.2.0" +description = "Classes Without Boilerplate" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.2.0" [package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] [[package]] -category = "dev" -description = "Specifications for callback functions passed in to an API" name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "dev" optional = false python-versions = "*" -version = "0.2.0" [[package]] -category = "dev" -description = "The uncompromising code formatter." name = "black" +version = "20.8b1" +description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.6" -version = "20.8b1" [package.dependencies] appdirs = "*" @@ -125,21 +87,17 @@ toml = ">=0.10.1" typed-ast = ">=1.4.0" typing-extensions = ">=3.7.4" -[package.dependencies.dataclasses] -python = "<3.7" -version = ">=0.6" - [package.extras] colorama = ["colorama (>=0.4.3)"] d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] -category = "dev" -description = "An easy safelist-based HTML-sanitizing tool." name = "bleach" +version = "3.2.1" +description = "An easy safelist-based HTML-sanitizing tool." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "3.2.1" [package.dependencies] packaging = "*" @@ -147,110 +105,137 @@ six = ">=1.9.0" webencodings = "*" [[package]] -category = "main" -description = "Extensible memoizing collections and decorators" name = "cachetools" +version = "4.1.1" +description = "Extensible memoizing collections and decorators" +category = "main" optional = false python-versions = "~=3.5" -version = "4.1.1" [[package]] -category = "main" -description = "Python package for providing Mozilla's CA Bundle." name = "certifi" +version = "2020.6.20" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = "*" -version = "2020.6.20" [[package]] -category = "main" -description = "Foreign Function Interface for Python calling C code." name = "cffi" +version = "1.14.3" +description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = "*" -version = "1.14.3" [package.dependencies] pycparser = "*" [[package]] -category = "main" -description = "Universal encoding detector for Python 2 and 3" name = "chardet" +version = "3.0.4" +description = "Universal encoding detector for Python 2 and 3" +category = "main" optional = false python-versions = "*" -version = "3.0.4" [[package]] -category = "dev" -description = "Composable command line interface toolkit" +name = "ckanapi" +version = "4.6" +description = "A command line interface and Python module for accessing the CKAN Action API" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +docopt = "*" +python-slugify = ">=1.0" +requests = "*" +six = ">=1.9,<2.0" + +[[package]] name = "click" +version = "8.0.3" +description = "Composable command line interface toolkit" +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "7.1.2" +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [[package]] -category = "dev" -description = "Cross-platform colored terminal text." -marker = "python_version >= \"3.3\" and sys_platform == \"win32\" or sys_platform == \"win32\"" name = "colorama" +version = "0.4.3" +description = "Cross-platform colored terminal text." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.4.3" [[package]] +name = "decorator" +version = "4.4.2" +description = "Decorators for Humans" category = "dev" -description = "A backport of the dataclasses module for Python 3.6" -marker = "python_version < \"3.7\"" -name = "dataclasses" optional = false -python-versions = "*" -version = "0.6" +python-versions = ">=2.6, !=3.0.*, !=3.1.*" [[package]] +name = "defusedxml" +version = "0.6.0" +description = "XML bomb protection for Python stdlib modules" category = "dev" -description = "Decorators for Humans" -name = "decorator" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*" -version = "4.4.2" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] -category = "dev" -description = "XML bomb protection for Python stdlib modules" -name = "defusedxml" +name = "docopt" +version = "0.6.2" +description = "Pythonic argument parser, that will make you smile" +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.6.0" +python-versions = "*" [[package]] -category = "dev" -description = "Discover and load entry points from installed packages." name = "entrypoints" +version = "0.3" +description = "Discover and load entry points from installed packages." +category = "dev" optional = false python-versions = ">=2.7" -version = "0.3" [[package]] +name = "fastavro" +version = "1.4.9" +description = "Fast read/write of AVRO files" category = "main" -description = "Google API client core library" +optional = false +python-versions = ">=3.7" + +[package.extras] +codecs = ["python-snappy", "zstandard", "lz4"] +lz4 = ["lz4"] +snappy = ["python-snappy"] +zstandard = ["zstandard"] + +[[package]] name = "google-api-core" +version = "1.31.5" +description = "Google API client core library" +category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.22.2" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" [package.dependencies] -google-auth = ">=1.21.1,<2.0dev" +google-auth = ">=1.25.0,<2.0dev" googleapis-common-protos = ">=1.6.0,<2.0dev" -protobuf = ">=3.12.0" +grpcio = {version = ">=1.29.0,<2.0dev", optional = true, markers = "extra == \"grpc\""} +packaging = ">=14.3" +protobuf = {version = ">=3.12.0", markers = "python_version > \"3\""} pytz = "*" requests = ">=2.18.0,<3.0.0dev" -setuptools = ">=34.0.0" -six = ">=1.10.0" - -[package.dependencies.grpcio] -optional = true -version = ">=1.29.0,<2.0dev" +six = ">=1.13.0" [package.extras] grpc = ["grpcio (>=1.29.0,<2.0dev)"] @@ -258,34 +243,31 @@ grpcgcp = ["grpcio-gcp (>=0.2.2)"] grpcio-gcp = ["grpcio-gcp (>=0.2.2)"] [[package]] -category = "main" -description = "Google Authentication Library" name = "google-auth" +version = "1.35.0" +description = "Google Authentication Library" +category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.22.0" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" [package.dependencies] cachetools = ">=2.0.0,<5.0" pyasn1-modules = ">=0.2.1" -setuptools = ">=40.3.0" +rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} six = ">=1.9.0" -[package.dependencies.aiohttp] -python = ">=3.6" -version = ">=3.6.2,<4.0.0dev" - -[package.dependencies.rsa] -python = ">=3.5" -version = ">=3.1.4,<5" +[package.extras] +aiohttp = ["requests (>=2.20.0,<3.0.0dev)", 
"aiohttp (>=3.6.2,<4.0.0dev)"] +pyopenssl = ["pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] [[package]] -category = "main" -description = "Google Authentication Library" name = "google-auth-oauthlib" +version = "0.4.1" +description = "Google Authentication Library" +category = "main" optional = false python-versions = "*" -version = "0.4.1" [package.dependencies] google-auth = "*" @@ -295,40 +277,43 @@ requests-oauthlib = ">=0.7.0" tool = ["click"] [[package]] -category = "main" -description = "Google BigQuery API client library" name = "google-cloud-bigquery" +version = "2.30.1" +description = "Google BigQuery API client library" +category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.28.0" +python-versions = ">=3.6, <3.11" [package.dependencies] -google-api-core = ">=1.21.0,<2.0dev" -google-cloud-core = ">=1.4.1,<2.0dev" -google-resumable-media = ">=0.6.0,<2.0dev" -six = ">=1.13.0,<2.0.0dev" +google-api-core = {version = ">=1.29.0,<3.0.0dev", extras = ["grpc"]} +google-cloud-core = ">=1.4.1,<3.0.0dev" +google-resumable-media = ">=0.6.0,<3.0dev" +grpcio = ">=1.38.1,<2.0dev" +packaging = ">=14.3" +proto-plus = ">=1.10.0" +protobuf = ">=3.12.0" +python-dateutil = ">=2.7.2,<3.0dev" +requests = ">=2.18.0,<3.0.0dev" [package.extras] -all = ["google-cloud-bigquery-storage (>=1.0.0,<2.0.0dev)", "grpcio (>=1.32.0,<2.0dev)", "pyarrow (>=1.0.0,<2.0dev)", "pandas (>=0.23.0)", "tqdm (>=4.7.4,<5.0.0dev)", "opentelemetry-api (0.9b0)", "opentelemetry-sdk (0.9b0)", "opentelemetry-instrumentation (0.9b0)", "pyarrow (>=0.16.0,<0.17.0dev)", "pyarrow (>=1.0.0,<2.0de)"] -bqstorage = ["google-cloud-bigquery-storage (>=1.0.0,<2.0.0dev)", "grpcio (>=1.32.0,<2.0dev)", "pyarrow (>=1.0.0,<2.0dev)"] -fastparquet = ["fastparquet", "python-snappy", "llvmlite (<=0.31.0)", "llvmlite (<=0.34.0)"] -opentelemetry = ["opentelemetry-api (0.9b0)", "opentelemetry-sdk (0.9b0)", "opentelemetry-instrumentation (0.9b0)"] -pandas = ["pandas (>=0.23.0)"] -pyarrow = ["pyarrow (>=0.16.0,<0.17.0dev)", "pyarrow (>=1.0.0,<2.0de)"] +all = ["google-cloud-bigquery-storage (>=2.0.0,<3.0.0dev)", "grpcio (>=1.38.1,<2.0dev)", "pyarrow (>=3.0.0,<7.0dev)", "geopandas (>=0.9.0,<1.0dev)", "Shapely (>=1.6.0,<2.0dev)", "pandas (>=0.24.2)", "tqdm (>=4.7.4,<5.0.0dev)", "opentelemetry-api (>=0.11b0)", "opentelemetry-sdk (>=0.11b0)", "opentelemetry-instrumentation (>=0.11b0)"] +bignumeric_type = ["pyarrow (>=3.0.0,<7.0dev)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.0.0,<3.0.0dev)", "grpcio (>=1.38.1,<2.0dev)", "pyarrow (>=3.0.0,<7.0dev)"] +geopandas = ["geopandas (>=0.9.0,<1.0dev)", "Shapely (>=1.6.0,<2.0dev)"] +opentelemetry = ["opentelemetry-api (>=0.11b0)", "opentelemetry-sdk (>=0.11b0)", "opentelemetry-instrumentation (>=0.11b0)"] +pandas = ["pandas (>=0.24.2)", "pyarrow (>=3.0.0,<7.0dev)"] tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] -category = "main" -description = "BigQuery Storage API API client library" name = "google-cloud-bigquery-storage" +version = "1.1.0" +description = "BigQuery Storage API API client library" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.1.0" [package.dependencies] -[package.dependencies.google-api-core] -extras = ["grpc"] -version = ">=1.14.0,<2.0.0dev" +google-api-core = {version = ">=1.14.0,<2.0.0dev", extras = ["grpc"]} [package.extras] fastavro = ["fastavro (>=0.21.2)"] @@ -336,41 +321,44 @@ pandas = ["pandas (>=0.17.1)"] pyarrow = ["pyarrow (>=0.15.0)"] [[package]] -category = "main" 
-description = "Google Cloud API client core library" name = "google-cloud-core" +version = "2.2.2" +description = "Google Cloud API client core library" +category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.4.1" +python-versions = ">=3.6" [package.dependencies] -google-api-core = ">=1.19.0,<2.0.0dev" +google-api-core = ">=1.21.0,<3.0.0dev" +google-auth = ">=1.24.0,<3.0dev" [package.extras] grpc = ["grpcio (>=1.8.2,<2.0dev)"] [[package]] -category = "main" -description = "Google Cloud Storage API client library" name = "google-cloud-storage" +version = "1.42.3" +description = "Google Cloud Storage API client library" +category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.31.2" +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" [package.dependencies] -google-auth = ">=1.11.0,<2.0dev" -google-cloud-core = ">=1.4.1,<2.0dev" -google-resumable-media = ">=1.0.0,<2.0dev" +google-api-core = {version = ">=1.29.0,<3.0dev", markers = "python_version >= \"3.6\""} +google-auth = {version = ">=1.25.0,<3.0dev", markers = "python_version >= \"3.6\""} +google-cloud-core = {version = ">=1.6.0,<3.0dev", markers = "python_version >= \"3.6\""} +google-resumable-media = {version = ">=1.3.0,<3.0dev", markers = "python_version >= \"3.6\""} +protobuf = {version = "*", markers = "python_version >= \"3.6\""} requests = ">=2.18.0,<3.0.0dev" +six = "*" [[package]] -category = "main" -description = "A python wrapper of the C library 'Google CRC32C'" -marker = "python_version >= \"3.5\"" name = "google-crc32c" +version = "1.0.0" +description = "A python wrapper of the C library 'Google CRC32C'" +category = "main" optional = false python-versions = ">=3.5" -version = "1.0.0" [package.dependencies] cffi = ">=1.0.0" @@ -379,30 +367,27 @@ cffi = ">=1.0.0" testing = ["pytest"] [[package]] -category = "main" -description = "Utilities for Google Media Downloads and Resumable Uploads" name = "google-resumable-media" +version = "2.1.0" +description = "Utilities for Google Media Downloads and Resumable Uploads" +category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" -version = "1.0.0" +python-versions = ">= 3.6" [package.dependencies] -six = "*" - -[package.dependencies.google-crc32c] -python = ">=3.5" -version = ">=1.0,<2.0dev" +google-crc32c = ">=1.0,<2.0dev" [package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)"] requests = ["requests (>=2.18.0,<3.0.0dev)"] [[package]] -category = "main" -description = "Common protobufs used in Google APIs" name = "googleapis-common-protos" +version = "1.52.0" +description = "Common protobufs used in Google APIs" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.52.0" [package.dependencies] protobuf = ">=3.6.0" @@ -411,47 +396,34 @@ protobuf = ">=3.6.0" grpc = ["grpcio (>=1.0.0)"] [[package]] -category = "main" -description = "HTTP/2-based RPC framework" name = "grpcio" +version = "1.43.0" +description = "HTTP/2-based RPC framework" +category = "main" optional = false -python-versions = "*" -version = "1.32.0" +python-versions = ">=3.6" [package.dependencies] six = ">=1.5.2" [package.extras] -protobuf = ["grpcio-tools (>=1.32.0)"] +protobuf = ["grpcio-tools (>=1.43.0)"] [[package]] -category = "main" -description = "Internationalized Domain Names in Applications (IDNA)" name = "idna" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, 
!=3.3.*" version = "2.10" - -[[package]] +description = "Internationalized Domain Names in Applications (IDNA)" category = "main" -description = "Patch ssl.match_hostname for Unicode(idna) domains support" -marker = "python_version >= \"3.6\" and python_version < \"3.7\"" -name = "idna-ssl" optional = false -python-versions = "*" -version = "1.1.0" - -[package.dependencies] -idna = ">=2.0" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] -category = "dev" -description = "Read metadata from Python packages" -marker = "python_version < \"3.8\"" name = "importlib-metadata" +version = "2.0.0" +description = "Read metadata from Python packages" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "2.0.0" [package.dependencies] zipp = ">=0.5" @@ -461,23 +433,23 @@ docs = ["sphinx", "rst.linker"] testing = ["packaging", "pep517", "importlib-resources (>=1.3)"] [[package]] -category = "dev" -description = "iniconfig: brain-dead simple config-ini parsing" name = "iniconfig" +version = "1.0.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = "*" -version = "1.0.1" [[package]] -category = "dev" -description = "IPython Kernel for Jupyter" name = "ipykernel" +version = "5.3.4" +description = "IPython Kernel for Jupyter" +category = "dev" optional = false python-versions = ">=3.5" -version = "5.3.4" [package.dependencies] -appnope = "*" +appnope = {version = "*", markers = "platform_system == \"Darwin\""} ipython = ">=5.0.0" jupyter-client = "*" tornado = ">=4.2" @@ -487,24 +459,23 @@ traitlets = ">=4.1.0" test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose"] [[package]] -category = "dev" -description = "IPython: Productive Interactive Computing" name = "ipython" +version = "7.16.1" +description = "IPython: Productive Interactive Computing" +category = "dev" optional = false python-versions = ">=3.6" -version = "7.16.1" [package.dependencies] -appnope = "*" +appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" jedi = ">=0.10" -pexpect = "*" +pexpect = {version = "*", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" pygments = "*" -setuptools = ">=18.5" traitlets = ">=4.2" [package.extras] @@ -519,56 +490,53 @@ qtconsole = ["qtconsole"] test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.14)"] [[package]] -category = "dev" -description = "Vestigial utilities from IPython" name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "dev" optional = false python-versions = "*" -version = "0.2.0" [[package]] -category = "dev" -description = "IPython HTML widgets for Jupyter" name = "ipywidgets" +version = "7.5.1" +description = "IPython HTML widgets for Jupyter" +category = "dev" optional = false python-versions = "*" -version = "7.5.1" [package.dependencies] ipykernel = ">=4.5.1" +ipython = {version = ">=4.0.0", markers = "python_version >= \"3.3\""} nbformat = ">=4.2.0" traitlets = ">=4.3.1" widgetsnbextension = ">=3.5.0,<3.6.0" -[package.dependencies.ipython] -python = ">=3.3" -version = ">=4.0.0" - [package.extras] test = ["pytest (>=3.6.0)", "pytest-cov", "mock"] [[package]] -category = "dev" -description = "An autocompletion tool for Python that can be 
used for text editors." name = "jedi" +version = "0.17.2" +description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.17.2" [package.dependencies] parso = ">=0.7.0,<0.8.0" [package.extras] -qa = ["flake8 (3.7.9)"] +qa = ["flake8 (==3.7.9)"] testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"] [[package]] -category = "main" -description = "A very fast and expressive template engine." name = "jinja2" +version = "2.11.2" +description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.11.2" [package.dependencies] MarkupSafe = ">=0.23" @@ -577,34 +545,30 @@ MarkupSafe = ">=0.23" i18n = ["Babel (>=0.8)"] [[package]] -category = "dev" -description = "An implementation of JSON Schema validation for Python" name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "dev" optional = false python-versions = "*" -version = "3.2.0" [package.dependencies] attrs = ">=17.4.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} pyrsistent = ">=0.14.0" -setuptools = "*" six = ">=1.11.0" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = "*" - [package.extras] format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] [[package]] -category = "dev" -description = "Jupyter metapackage. Install all the Jupyter components in one go." name = "jupyter" +version = "1.0.0" +description = "Jupyter metapackage. Install all the Jupyter components in one go." +category = "dev" optional = false python-versions = "*" -version = "1.0.0" [package.dependencies] ipykernel = "*" @@ -615,12 +579,12 @@ notebook = "*" qtconsole = "*" [[package]] -category = "dev" -description = "Jupyter protocol implementation and client libraries" name = "jupyter-client" +version = "6.1.7" +description = "Jupyter protocol implementation and client libraries" +category = "dev" optional = false python-versions = ">=3.5" -version = "6.1.7" [package.dependencies] jupyter-core = ">=4.6.0" @@ -633,12 +597,12 @@ traitlets = "*" test = ["ipykernel", "ipython", "mock", "pytest", "pytest-asyncio", "async-generator", "pytest-timeout"] [[package]] -category = "dev" -description = "Jupyter terminal console" name = "jupyter-console" +version = "6.2.0" +description = "Jupyter terminal console" +category = "dev" optional = false python-versions = ">=3.6" -version = "6.2.0" [package.dependencies] ipykernel = "*" @@ -651,76 +615,67 @@ pygments = "*" test = ["pexpect"] [[package]] -category = "dev" -description = "Jupyter core package. A base package on which Jupyter projects rely." name = "jupyter-core" +version = "4.6.3" +description = "Jupyter core package. A base package on which Jupyter projects rely." 
+category = "dev" optional = false python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,!=3.4,>=2.7" -version = "4.6.3" [package.dependencies] -pywin32 = ">=1.0" +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\""} traitlets = "*" [[package]] -category = "dev" -description = "Pygments theme using JupyterLab CSS variables" name = "jupyterlab-pygments" +version = "0.1.2" +description = "Pygments theme using JupyterLab CSS variables" +category = "dev" optional = false python-versions = "*" -version = "0.1.2" [package.dependencies] pygments = ">=2.4.1,<3" [[package]] -category = "main" -description = "Safely add untrusted strings to HTML/XML markup." name = "markupsafe" +version = "1.1.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.1.1" [[package]] -category = "dev" -description = "The fastest markdown parser in pure Python" name = "mistune" +version = "0.8.4" +description = "The fastest markdown parser in pure Python" +category = "dev" optional = false python-versions = "*" -version = "0.8.4" [[package]] -category = "dev" -description = "More routines for operating on iterables, beyond itertools" name = "more-itertools" -optional = false -python-versions = ">=3.5" version = "8.5.0" - -[[package]] -category = "main" -description = "multidict implementation" -marker = "python_version >= \"3.6\"" -name = "multidict" +description = "More routines for operating on iterables, beyond itertools" +category = "dev" optional = false python-versions = ">=3.5" -version = "4.7.6" [[package]] -category = "dev" -description = "Experimental type system extensions for programs checked with the mypy typechecker." name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" optional = false python-versions = "*" -version = "0.4.3" [[package]] -category = "dev" -description = "A client library for executing notebooks. Formally nbconvert's ExecutePreprocessor." name = "nbclient" +version = "0.5.0" +description = "A client library for executing notebooks. Formally nbconvert's ExecutePreprocessor." 
+category = "dev" optional = false python-versions = ">=3.6" -version = "0.5.0" [package.dependencies] async-generator = "*" @@ -735,12 +690,12 @@ sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] [[package]] -category = "dev" -description = "Converting Jupyter Notebooks" name = "nbconvert" +version = "6.0.6" +description = "Converting Jupyter Notebooks" +category = "dev" optional = false python-versions = ">=3.6" -version = "6.0.6" [package.dependencies] bleach = "*" @@ -758,19 +713,19 @@ testpath = "*" traitlets = ">=4.2" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] serve = ["tornado (>=4.0)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (0.2.2)"] -webpdf = ["pyppeteer (0.2.2)"] +test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)"] +webpdf = ["pyppeteer (==0.2.2)"] [[package]] -category = "dev" -description = "The Jupyter Notebook format" name = "nbformat" +version = "5.0.7" +description = "The Jupyter Notebook format" +category = "dev" optional = false python-versions = ">=3.5" -version = "5.0.7" [package.dependencies] ipython-genutils = "*" @@ -782,23 +737,22 @@ traitlets = ">=4.1" test = ["pytest", "pytest-cov", "testpath"] [[package]] -category = "dev" -description = "Patch asyncio to allow nested event loops" name = "nest-asyncio" +version = "1.4.1" +description = "Patch asyncio to allow nested event loops" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.4.1" [[package]] -category = "dev" -description = "A web-based notebook environment for interactive computing" name = "notebook" +version = "6.1.4" +description = "A web-based notebook environment for interactive computing" +category = "dev" optional = false python-versions = ">=3.5" -version = "6.1.4" [package.dependencies] -Send2Trash = "*" argon2-cffi = "*" ipykernel = "*" ipython-genutils = "*" @@ -809,6 +763,7 @@ nbconvert = "*" nbformat = "*" prometheus-client = "*" pyzmq = ">=17" +Send2Trash = "*" terminado = ">=0.8.3" tornado = ">=5.0" traitlets = ">=4.2.1" @@ -818,20 +773,20 @@ docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt"] test = ["nose", "coverage", "requests", "nose-warnings-filters", "nbval", "nose-exclude", "selenium", "pytest", "pytest-cov", "requests-unixsocket"] [[package]] -category = "main" -description = "NumPy is the fundamental package for array computing with Python." name = "numpy" +version = "1.19.2" +description = "NumPy is the fundamental package for array computing with Python." 
+category = "main" optional = false python-versions = ">=3.6" -version = "1.19.2" [[package]] -category = "main" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" name = "oauthlib" +version = "3.1.0" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "3.1.0" [package.extras] rsa = ["cryptography"] @@ -839,40 +794,40 @@ signals = ["blinker"] signedtoken = ["cryptography", "pyjwt (>=1.0.0)"] [[package]] -category = "dev" -description = "Core utilities for Python packages" name = "packaging" +version = "20.4" +description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.4" [package.dependencies] pyparsing = ">=2.0.2" six = "*" [[package]] -category = "main" -description = "Powerful data structures for data analysis, time series, and statistics" name = "pandas" +version = "1.2.4" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" optional = false -python-versions = ">=3.5.3" -version = "0.25.3" +python-versions = ">=3.7.1" [package.dependencies] -numpy = ">=1.13.3" -python-dateutil = ">=2.6.1" -pytz = ">=2017.2" +numpy = ">=1.16.5" +python-dateutil = ">=2.7.3" +pytz = ">=2017.3" [package.extras] -test = ["pytest (>=4.0.2)", "pytest-xdist", "hypothesis (>=3.58)"] +test = ["pytest (>=5.0.1)", "pytest-xdist", "hypothesis (>=3.58)"] [[package]] -category = "main" -description = "Pandas interface to Google BigQuery" name = "pandas-gbq" +version = "0.13.2" +description = "Pandas interface to Google BigQuery" +category = "main" optional = false python-versions = ">=3.5" -version = "0.13.2" [package.dependencies] google-auth = "*" @@ -880,213 +835,245 @@ google-auth-oauthlib = "*" google-cloud-bigquery = ">=1.11.1" pandas = ">=0.19.0" pydata-google-auth = "*" -setuptools = "*" [package.extras] tqdm = ["tqdm (>=4.23.0)"] [[package]] -category = "dev" -description = "Utilities for writing pandoc filters in python" +name = "pandavro" +version = "1.6.0" +description = "The interface between Avro and pandas DataFrame" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +fastavro = ">=0.14.11" +numpy = ">=1.7.0" +pandas = ">=1.1.5" +six = ">=1.9" + +[package.extras] +tests = ["pytest"] + +[[package]] name = "pandocfilters" +version = "1.4.2" +description = "Utilities for writing pandoc filters in python" +category = "dev" optional = false python-versions = "*" -version = "1.4.2" [[package]] -category = "dev" -description = "A Python Parser" name = "parso" +version = "0.7.1" +description = "A Python Parser" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.7.1" [package.extras] testing = ["docopt", "pytest (>=3.0.7)"] [[package]] -category = "dev" -description = "Utility library for gitignore style pattern matching of file paths." name = "pathspec" +version = "0.8.0" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.8.0" [[package]] -category = "dev" -description = "Pexpect allows easy control of interactive console applications." 
-marker = "python_version >= \"3.3\" and sys_platform != \"win32\" or sys_platform != \"win32\"" name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "dev" optional = false python-versions = "*" -version = "4.8.0" [package.dependencies] ptyprocess = ">=0.5" [[package]] -category = "dev" -description = "Tiny 'shelve'-like database with concurrency support" name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" optional = false python-versions = "*" -version = "0.7.5" [[package]] -category = "dev" -description = "plugin and hook calling mechanisms for python" name = "pluggy" +version = "0.13.1" +description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.13.1" [package.dependencies] -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] [[package]] -category = "dev" -description = "Python client for the Prometheus monitoring system." name = "prometheus-client" +version = "0.8.0" +description = "Python client for the Prometheus monitoring system." +category = "dev" optional = false python-versions = "*" -version = "0.8.0" [package.extras] twisted = ["twisted"] [[package]] -category = "dev" -description = "Library for building powerful interactive command lines in Python" name = "prompt-toolkit" +version = "3.0.3" +description = "Library for building powerful interactive command lines in Python" +category = "dev" optional = false python-versions = ">=3.6" -version = "3.0.3" [package.dependencies] wcwidth = "*" [[package]] +name = "proto-plus" +version = "1.19.9" +description = "Beautiful, Pythonic protocol buffers." 
category = "main" -description = "Protocol Buffers" -name = "protobuf" optional = false -python-versions = "*" -version = "3.13.0" +python-versions = ">=3.6" [package.dependencies] -setuptools = "*" -six = ">=1.9" +protobuf = ">=3.19.0" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.22.2)"] + +[[package]] +name = "protobuf" +version = "3.19.4" +description = "Protocol Buffers" +category = "main" +optional = false +python-versions = ">=3.5" [[package]] -category = "dev" -description = "Run a subprocess in a pseudo terminal" -marker = "sys_platform != \"win32\" or os_name != \"nt\" or python_version >= \"3.3\" and sys_platform != \"win32\"" name = "ptyprocess" +version = "0.6.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" optional = false python-versions = "*" -version = "0.6.0" [[package]] -category = "dev" -description = "library with cross-python path, ini-parsing, io, code, log facilities" name = "py" +version = "1.9.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.9.0" [[package]] -category = "main" -description = "PyYAML-based module to produce pretty and readable YAML-serialized data" name = "pyaml" +version = "20.4.0" +description = "PyYAML-based module to produce pretty and readable YAML-serialized data" +category = "main" optional = false python-versions = "*" -version = "20.4.0" [package.dependencies] PyYAML = "*" [[package]] +name = "pyarrow" +version = "6.0.0" +description = "Python library for Apache Arrow" category = "main" -description = "ASN.1 types and codecs" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +numpy = ">=1.16.6" + +[[package]] name = "pyasn1" +version = "0.4.8" +description = "ASN.1 types and codecs" +category = "main" optional = false python-versions = "*" -version = "0.4.8" [[package]] -category = "main" -description = "A collection of ASN.1-based protocols modules." name = "pyasn1-modules" +version = "0.2.8" +description = "A collection of ASN.1-based protocols modules." +category = "main" optional = false python-versions = "*" -version = "0.2.8" [package.dependencies] pyasn1 = ">=0.4.6,<0.5.0" [[package]] -category = "main" -description = "C parser in Python" -marker = "python_version >= \"3.5\"" name = "pycparser" +version = "2.20" +description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.20" [[package]] -category = "main" -description = "PyData helpers for authenticating to Google APIs" name = "pydata-google-auth" +version = "1.1.0" +description = "PyData helpers for authenticating to Google APIs" +category = "main" optional = false python-versions = "*" -version = "1.1.0" [package.dependencies] google-auth = "*" google-auth-oauthlib = "*" -setuptools = "*" [[package]] -category = "dev" -description = "Pygments is a syntax highlighting package written in Python." name = "pygments" +version = "2.7.1" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" optional = false python-versions = ">=3.5" -version = "2.7.1" [[package]] -category = "dev" -description = "Python parsing module" name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.7" [[package]] -category = "dev" -description = "Persistent/Functional/Immutable data structures" name = "pyrsistent" +version = "0.17.3" +description = "Persistent/Functional/Immutable data structures" +category = "dev" optional = false python-versions = ">=3.5" -version = "0.17.3" [[package]] -category = "dev" -description = "pytest: simple powerful testing with Python" name = "pytest" +version = "6.0.2" +description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.5" -version = "6.0.2" [package.dependencies] -atomicwrites = ">=1.0" +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=17.4.0" -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" more-itertools = ">=4.0.0" packaging = "*" @@ -1094,74 +1081,82 @@ pluggy = ">=0.12,<1.0" py = ">=1.8.2" toml = "*" -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" - [package.extras] -checkqa_mypy = ["mypy (0.780)"] +checkqa_mypy = ["mypy (==0.780)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] -category = "main" -description = "Extensions to the standard Python datetime module" name = "python-dateutil" +version = "2.8.1" +description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -version = "2.8.1" [package.dependencies] six = ">=1.5" [[package]] +name = "python-slugify" +version = "5.0.2" +description = "A Python Slugify application that handles Unicode" category = "main" -description = "World timezone definitions, modern and historical" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +text-unidecode = ">=1.3" + +[package.extras] +unidecode = ["Unidecode (>=1.1.1)"] + +[[package]] name = "pytz" +version = "2020.1" +description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" -version = "2020.1" [[package]] -category = "dev" -description = "Python for Window Extensions" -marker = "sys_platform == \"win32\"" name = "pywin32" +version = "228" +description = "Python for Window Extensions" +category = "dev" optional = false python-versions = "*" -version = "228" [[package]] -category = "dev" -description = "Python bindings for the winpty library" -marker = "os_name == \"nt\"" name = "pywinpty" +version = "0.5.7" +description = "Python bindings for the winpty library" +category = "dev" optional = false python-versions = "*" -version = "0.5.7" [[package]] -category = "main" -description = "YAML parser and emitter for Python" name = "pyyaml" +version = "5.3.1" +description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "5.3.1" [[package]] -category = "dev" -description = "Python bindings for 0MQ" name = "pyzmq" +version = "19.0.2" +description = "Python bindings for 0MQ" +category = "dev" optional = false python-versions = 
">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" -version = "19.0.2" [[package]] -category = "dev" -description = "Jupyter Qt console" name = "qtconsole" +version = "4.7.7" +description = "Jupyter Qt console" +category = "dev" optional = false python-versions = "*" -version = "4.7.7" [package.dependencies] ipykernel = ">=4.1" @@ -1178,28 +1173,28 @@ doc = ["Sphinx (>=1.3)"] test = ["pytest", "mock"] [[package]] -category = "dev" -description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5, PyQt4 and PySide) and additional custom QWidgets." name = "qtpy" +version = "1.9.0" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5, PyQt4 and PySide) and additional custom QWidgets." +category = "dev" optional = false python-versions = "*" -version = "1.9.0" [[package]] -category = "dev" -description = "Alternative regular expression module, to replace re." name = "regex" +version = "2020.9.27" +description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = "*" -version = "2020.9.27" [[package]] -category = "main" -description = "Python HTTP for Humans." name = "requests" +version = "2.24.0" +description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.24.0" [package.dependencies] certifi = ">=2017.4.17" @@ -1209,106 +1204,147 @@ urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" [package.extras] security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] -category = "main" -description = "OAuthlib authentication support for Requests." name = "requests-oauthlib" +version = "1.3.0" +description = "OAuthlib authentication support for Requests." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.3.0" [package.dependencies] oauthlib = ">=3.0.0" requests = ">=2.0.0" [package.extras] -rsa = ["oauthlib (>=3.0.0)"] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] -category = "main" -description = "Pure-Python RSA implementation" -marker = "python_version >= \"3.5\"" name = "rsa" +version = "4.6" +description = "Pure-Python RSA implementation" +category = "main" optional = false python-versions = ">=3.5, <4" -version = "4.6" [package.dependencies] pyasn1 = ">=0.1.3" [[package]] -category = "dev" -description = "Send file to trash natively under Mac OS X, Windows and Linux." +name = "ruamel.yaml" +version = "0.17.10" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.10\""} + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel.yaml.clib" +version = "0.2.6" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] name = "send2trash" +version = "1.5.0" +description = "Send file to trash natively under Mac OS X, Windows and Linux." 
+category = "dev" optional = false python-versions = "*" -version = "1.5.0" [[package]] -category = "main" -description = "Python 2 and 3 compatibility utilities" name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "1.15.0" [[package]] -category = "dev" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." name = "terminado" +version = "0.9.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +category = "dev" optional = false python-versions = ">=3.6" -version = "0.9.1" [package.dependencies] -ptyprocess = "*" -pywinpty = ">=0.5" +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=0.5", markers = "os_name == \"nt\""} tornado = ">=4" [[package]] -category = "dev" -description = "Test utilities for code working with files and commands" name = "testpath" +version = "0.4.4" +description = "Test utilities for code working with files and commands" +category = "dev" optional = false python-versions = "*" -version = "0.4.4" [package.extras] test = ["pathlib2"] [[package]] -category = "dev" -description = "Python Library for Tom's Obvious, Minimal Language" -name = "toml" +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +category = "main" optional = false python-versions = "*" + +[[package]] +name = "toml" version = "0.10.1" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = "*" [[package]] -category = "main" -description = "Style preserving TOML library" name = "tomlkit" +version = "0.7.0" +description = "Style preserving TOML library" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.7.0" [[package]] -category = "dev" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." name = "tornado" +version = "6.0.4" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "dev" optional = false python-versions = ">= 3.5" -version = "6.0.4" [[package]] -category = "dev" -description = "Traitlets Python config system" +name = "tqdm" +version = "4.50.2" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "argopt", "pydoc-markdown"] + +[[package]] name = "traitlets" +version = "4.3.3" +description = "Traitlets Python config system" +category = "dev" optional = false python-versions = "*" -version = "4.3.3" [package.dependencies] decorator = "*" @@ -1319,111 +1355,79 @@ six = "*" test = ["pytest", "mock"] [[package]] -category = "dev" -description = "a fork of Python 2 and 3 ast modules with type comment support" name = "typed-ast" +version = "1.4.1" +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" optional = false python-versions = "*" -version = "1.4.1" [[package]] -category = "main" -description = "Backported and Experimental Type Hints for Python 3.5+" name = "typing-extensions" +version = "3.7.4.3" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "dev" optional = false python-versions = "*" -version = "3.7.4.3" [[package]] -category = "main" -description = "HTTP library with thread-safe connection pooling, file post, and more." name = "urllib3" +version = "1.25.10" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "1.25.10" [package.extras] brotli = ["brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] -category = "dev" -description = "Measures the displayed width of unicode strings in a terminal" name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" optional = false python-versions = "*" -version = "0.2.5" [[package]] -category = "dev" -description = "Character encoding aliases for legacy web content" name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "dev" optional = false python-versions = "*" -version = "0.5.1" [[package]] -category = "dev" -description = "IPython HTML widgets for Jupyter" name = "widgetsnbextension" +version = "3.5.1" +description = "IPython HTML widgets for Jupyter" +category = "dev" optional = false python-versions = "*" -version = "3.5.1" [package.dependencies] notebook = ">=4.4.1" [[package]] -category = "main" -description = "Yet another URL library" -marker = "python_version >= \"3.6\"" -name = "yarl" -optional = false -python-versions = ">=3.5" -version = "1.6.0" - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - -[package.dependencies.typing-extensions] -python = "<3.8" -version = ">=3.7.4" - -[[package]] -category = "dev" -description = "Backport of pathlib-compatible object wrapper for zip files" -marker = "python_version < \"3.8\"" name = "zipp" +version = "3.2.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" optional = false python-versions = ">=3.6" -version = "3.2.0" [package.extras] docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] 
-testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] -content-hash = "fe237c99569cb1da9488dc2936bc392b8a2b858b34d5499bfb9b7f9f23df5572" -lock-version = "1.0" -python-versions = "^3.6" +lock-version = "1.1" +python-versions = ">=3.7.1,<3.11" +content-hash = "d07b2c37c84d7ae566c2598eb8eebd9f3ec4c918ffdcdddd45195d30b88e56e9" [metadata.files] -aiohttp = [ - {file = "aiohttp-3.6.2-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:1e984191d1ec186881ffaed4581092ba04f7c61582a177b187d3a2f07ed9719e"}, - {file = "aiohttp-3.6.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:50aaad128e6ac62e7bf7bd1f0c0a24bc968a0c0590a726d5a955af193544bcec"}, - {file = "aiohttp-3.6.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:65f31b622af739a802ca6fd1a3076fd0ae523f8485c52924a89561ba10c49b48"}, - {file = "aiohttp-3.6.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ae55bac364c405caa23a4f2d6cfecc6a0daada500274ffca4a9230e7129eac59"}, - {file = "aiohttp-3.6.2-cp36-cp36m-win32.whl", hash = "sha256:344c780466b73095a72c616fac5ea9c4665add7fc129f285fbdbca3cccf4612a"}, - {file = "aiohttp-3.6.2-cp36-cp36m-win_amd64.whl", hash = "sha256:4c6efd824d44ae697814a2a85604d8e992b875462c6655da161ff18fd4f29f17"}, - {file = "aiohttp-3.6.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:2f4d1a4fdce595c947162333353d4a44952a724fba9ca3205a3df99a33d1307a"}, - {file = "aiohttp-3.6.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6206a135d072f88da3e71cc501c59d5abffa9d0bb43269a6dcd28d66bfafdbdd"}, - {file = "aiohttp-3.6.2-cp37-cp37m-win32.whl", hash = "sha256:b778ce0c909a2653741cb4b1ac7015b5c130ab9c897611df43ae6a58523cb965"}, - {file = "aiohttp-3.6.2-cp37-cp37m-win_amd64.whl", hash = "sha256:32e5f3b7e511aa850829fbe5aa32eb455e5534eaa4b1ce93231d00e2f76e5654"}, - {file = "aiohttp-3.6.2-py3-none-any.whl", hash = "sha256:460bd4237d2dbecc3b5ed57e122992f60188afe46e7319116da5eb8a9dfedba4"}, - {file = "aiohttp-3.6.2.tar.gz", hash = "sha256:259ab809ff0727d0e834ac5e8a283dc5e3e0ecc30c4d80b3cd17a4139ce1f326"}, -] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, @@ -1449,15 +1453,17 @@ argon2-cffi = [ {file = "argon2_cffi-20.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203"}, {file = "argon2_cffi-20.1.0-cp38-cp38-win32.whl", hash = "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78"}, {file = "argon2_cffi-20.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2"}, + {file = "argon2_cffi-20.1.0-cp39-cp39-win32.whl", hash = "sha256:e2db6e85c057c16d0bd3b4d2b04f270a7467c147381e8fd73cbbe5bc719832be"}, + {file = "argon2_cffi-20.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a84934bd818e14a17943de8099d41160da4a336bcc699bb4c394bbb9b94bd32"}, + {file = "argon2_cffi-20.1.0-pp36-pypy36_pp73-macosx_10_7_x86_64.whl", hash = "sha256:b94042e5dcaa5d08cf104a54bfae614be502c6f44c9c89ad1535b2ebdaacbd4c"}, + {file = 
"argon2_cffi-20.1.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:8282b84ceb46b5b75c3a882b28856b8cd7e647ac71995e71b6705ec06fc232c3"}, + {file = "argon2_cffi-20.1.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3aa804c0e52f208973845e8b10c70d8957c9e5a666f702793256242e9167c4e0"}, + {file = "argon2_cffi-20.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:36320372133a003374ef4275fbfce78b7ab581440dfca9f9471be3dd9a522428"}, ] async-generator = [ {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, ] -async-timeout = [ - {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, - {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, -] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -1527,18 +1533,17 @@ chardet = [ {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, ] +ckanapi = [ + {file = "ckanapi-4.6.tar.gz", hash = "sha256:35361965bfb38c8e146d7229f2d7c3aaf1c0f2ef547de4239b4d38931bf081d2"}, +] click = [ - {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, ] colorama = [ {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, ] -dataclasses = [ - {file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"}, - {file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"}, -] decorator = [ {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, {file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"}, @@ -1547,37 +1552,58 @@ defusedxml = [ {file = "defusedxml-0.6.0-py2.py3-none-any.whl", hash = "sha256:6687150770438374ab581bb7a1b327a847dd9c5749e396102de3fad4e8a3ef93"}, {file = "defusedxml-0.6.0.tar.gz", hash = "sha256:f684034d135af4c6cbb949b8a4d2ed61634515257a67299e5f940fbaa34377f5"}, ] +docopt = [ + {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +] entrypoints = [ {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, {file = 
"entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, ] +fastavro = [ + {file = "fastavro-1.4.9-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:2e64a77c529b638e89a879ff0211debfab5b2d114c26a2af29c81f6b013f395a"}, + {file = "fastavro-1.4.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fc9c95b7c1d59c5a2d29be21075870a122152cf927d84587dafc96da6b2ac3d"}, + {file = "fastavro-1.4.9-cp310-cp310-win_amd64.whl", hash = "sha256:927fd6148a8dd9646c129c0a0e8571aea829abc3cba04a3d5a4010a866934f4c"}, + {file = "fastavro-1.4.9-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:000b70c5109a61bdbfddeb2821a506de8f5333f243c608cbced61d44657d6c2f"}, + {file = "fastavro-1.4.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a77a1b5347a06416e236c77027c750aaeda29ef8189aa456eb2a2571274b43"}, + {file = "fastavro-1.4.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce2c7747ce958115388872db0756d3eeb0d796084eea9b46dc3758ef32c4d952"}, + {file = "fastavro-1.4.9-cp37-cp37m-win_amd64.whl", hash = "sha256:d6ccb77604903a0308316e696bb65a8943361af5f757d10985689656c9bce6ed"}, + {file = "fastavro-1.4.9-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9a6ada2d6e133a2319438248c2e023b6735747b249c5a79d5f08f9d431e5d226"}, + {file = "fastavro-1.4.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7537e4df7782b03b9761e9338cef9fc7bfcc41100ab93c36c5c60fa568e724a"}, + {file = "fastavro-1.4.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a9cd6d8471beb4020b4126fd04150ed7295f74ae7234d0dc9205b55c193851e"}, + {file = "fastavro-1.4.9-cp38-cp38-win_amd64.whl", hash = "sha256:fa9d8b47e0533c84152332ad491bb63bbae76a8a7a0df1caa821e0cbebf0fb70"}, + {file = "fastavro-1.4.9-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:3759bdc77009ee1e2e76fc9f58b951c05c00a8600ef9ddbff59fee3cb0c9e235"}, + {file = "fastavro-1.4.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b98ef2bdb123b95945aa6d69d6a7d79f211df3274b2dd7786da7852ddec964d0"}, + {file = "fastavro-1.4.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32b804aa6920d80c0e94e1180d480f28f56c4b901849bd80ed180851752b5ce6"}, + {file = "fastavro-1.4.9-cp39-cp39-win_amd64.whl", hash = "sha256:f9b04acaf06b16218b47985e92d8daa98c1116d58f3cff81a5b3cf39cef9afc0"}, + {file = "fastavro-1.4.9.tar.gz", hash = "sha256:be3fec387eb2cdc9627060b5ae0690542c687dddc951b63fa11203553769ae5e"}, +] google-api-core = [ - {file = "google-api-core-1.22.2.tar.gz", hash = "sha256:779107f17e0fef8169c5239d56a8fbff03f9f72a3893c0c9e5842ec29dfedd54"}, - {file = "google_api_core-1.22.2-py2.py3-none-any.whl", hash = "sha256:67e33a852dcca7cb7eff49abc35c8cc2c0bb8ab11397dc8306d911505cae2990"}, + {file = "google-api-core-1.31.5.tar.gz", hash = "sha256:85d2074f2c8f9c07e614d7f978767d71ceb7d40647814ef4236d3a0ef671ee75"}, + {file = "google_api_core-1.31.5-py2.py3-none-any.whl", hash = "sha256:6815207a8b422e9da42c200681603f304b25f98c98b675a9db9fdc3717e44280"}, ] google-auth = [ - {file = "google-auth-1.22.0.tar.gz", hash = "sha256:a73e6fb6d232ed1293ef9a5301e6f8aada7880d19c65d7f63e130dc50ec05593"}, - {file = "google_auth-1.22.0-py2.py3-none-any.whl", hash = "sha256:e86e72142d939a8d90a772947268aacc127ab7a1d1d6f3e0fecca7a8d74d8257"}, + {file = "google-auth-1.35.0.tar.gz", hash = "sha256:b7033be9028c188ee30200b204ea00ed82ea1162e8ac1df4aa6ded19a191d88e"}, + {file = 
"google_auth-1.35.0-py2.py3-none-any.whl", hash = "sha256:997516b42ecb5b63e8d80f5632c1a61dddf41d2a4c2748057837e06e00014258"}, ] google-auth-oauthlib = [ {file = "google-auth-oauthlib-0.4.1.tar.gz", hash = "sha256:88d2cd115e3391eb85e1243ac6902e76e77c5fe438b7276b297fbe68015458dd"}, {file = "google_auth_oauthlib-0.4.1-py2.py3-none-any.whl", hash = "sha256:a92a0f6f41a0fb6138454fbc02674e64f89d82a244ea32f98471733c8ef0e0e1"}, ] google-cloud-bigquery = [ - {file = "google-cloud-bigquery-1.28.0.tar.gz", hash = "sha256:9784cff71d6a46ce202748169f9c7e38fc99d6babbb2f3cdc540475d11f572b9"}, - {file = "google_cloud_bigquery-1.28.0-py2.py3-none-any.whl", hash = "sha256:9266e989531f290d3b836dc7b308ac22b350c4d664af19325bd0102261231b71"}, + {file = "google-cloud-bigquery-2.30.1.tar.gz", hash = "sha256:4e3b5e3dcc475d5a601d84872ac0b63e059540be2251b1c4165c51106d572855"}, + {file = "google_cloud_bigquery-2.30.1-py2.py3-none-any.whl", hash = "sha256:c62d601aa0f62388e1909d11de40db7597b02fb8602ccb7f21a3ac2a0997495b"}, ] google-cloud-bigquery-storage = [ {file = "google-cloud-bigquery-storage-1.1.0.tar.gz", hash = "sha256:c92533cedbb672f1a35555c112d4d5cccb9f8f6d0e98a604fbf98223773adad3"}, {file = "google_cloud_bigquery_storage-1.1.0-py2.py3-none-any.whl", hash = "sha256:fc543e9d2343d34c043ad48984333ba84de10be31b7af8435548aaf8555507c4"}, ] google-cloud-core = [ - {file = "google-cloud-core-1.4.1.tar.gz", hash = "sha256:613e56f164b6bee487dd34f606083a0130f66f42f7b10f99730afdf1630df507"}, - {file = "google_cloud_core-1.4.1-py2.py3-none-any.whl", hash = "sha256:4c9e457fcfc026fdde2e492228f04417d4c717fb0f29f070122fb0ab89e34ebd"}, + {file = "google-cloud-core-2.2.2.tar.gz", hash = "sha256:7d19bf8868b410d0bdf5a03468a3f3f2db233c0ee86a023f4ecc2b7a4b15f736"}, + {file = "google_cloud_core-2.2.2-py2.py3-none-any.whl", hash = "sha256:d9cffaf86df6a876438d4e8471183bbe404c9a15de9afe60433bc7dce8cb4252"}, ] google-cloud-storage = [ - {file = "google-cloud-storage-1.31.2.tar.gz", hash = "sha256:74bbb5b2d0b249de4a52f561435d0c3570ddc19b249653ae588ec0abcc3c81e6"}, - {file = "google_cloud_storage-1.31.2-py2.py3-none-any.whl", hash = "sha256:e2a2533b5e368365e84d2acaa53b75266a09802fc1c8bda52e3212335eccb6bd"}, + {file = "google-cloud-storage-1.42.3.tar.gz", hash = "sha256:7754d4dcaa45975514b404ece0da2bb4292acbc67ca559a69e12a19d54fcdb06"}, + {file = "google_cloud_storage-1.42.3-py2.py3-none-any.whl", hash = "sha256:71ee3a0dcf2c139f034a054181cd7658f1ec8f12837d2769c450a8a00fcd4c6d"}, ] google-crc32c = [ {file = "google-crc32c-1.0.0.tar.gz", hash = "sha256:9439b960b6ecd847557675d130fc3626d762bf535da595c20a6949a705fb3eae"}, @@ -1600,61 +1626,63 @@ google-crc32c = [ {file = "google_crc32c-1.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:41fb6c22cd72ae3db4d98d28dbb768d53397c8fc3cb8ab945fd434e842e622d4"}, ] google-resumable-media = [ - {file = "google-resumable-media-1.0.0.tar.gz", hash = "sha256:173acc6bade1480a529fa29c6c2717543ae2dc09d42e9461fdb86f39502efcf2"}, - {file = "google_resumable_media-1.0.0-py2.py3-none-any.whl", hash = "sha256:99b5ac33a75ddb25d5e6aad487b37ecb4fa18b1fbf3d1ad726e032c3d6fc9aff"}, + {file = "google-resumable-media-2.1.0.tar.gz", hash = "sha256:725b989e0dd387ef2703d1cc8e86217474217f4549593c477fd94f4024a0f911"}, + {file = "google_resumable_media-2.1.0-py2.py3-none-any.whl", hash = "sha256:cdc75ea0361e39704dc7df7da59fbd419e73c8bc92eac94d8a020d36baa9944b"}, ] googleapis-common-protos = [ {file = "googleapis-common-protos-1.52.0.tar.gz", hash = "sha256:560716c807117394da12cecb0a54da5a451b5cf9866f1d37e9a5e2329a665351"}, 
{file = "googleapis_common_protos-1.52.0-py2.py3-none-any.whl", hash = "sha256:c8961760f5aad9a711d37b675be103e0cc4e9a39327e0d6d857872f698403e24"}, ] grpcio = [ - {file = "grpcio-1.32.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3afb058b6929eba07dba9ae6c5b555aa1d88cb140187d78cc510bd72d0329f28"}, - {file = "grpcio-1.32.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a8004b34f600a8a51785e46859cd88f3386ef67cccd1cfc7598e3d317608c643"}, - {file = "grpcio-1.32.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e6786f6f7be0937614577edcab886ddce91b7c1ea972a07ef9972e9f9ecbbb78"}, - {file = "grpcio-1.32.0-cp27-cp27m-win32.whl", hash = "sha256:e467af6bb8f5843f5a441e124b43474715cfb3981264e7cd227343e826dcc3ce"}, - {file = "grpcio-1.32.0-cp27-cp27m-win_amd64.whl", hash = "sha256:1376a60f9bfce781b39973f100b5f67e657b5be479f2fd8a7d2a408fc61c085c"}, - {file = "grpcio-1.32.0-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:ce617e1c4a39131f8527964ac9e700eb199484937d7a0b3e52655a3ba50d5fb9"}, - {file = "grpcio-1.32.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:99bac0e2c820bf446662365df65841f0c2a55b0e2c419db86eaf5d162ddae73e"}, - {file = "grpcio-1.32.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6d869a3e8e62562b48214de95e9231c97c53caa7172802236cd5d60140d7cddd"}, - {file = "grpcio-1.32.0-cp35-cp35m-linux_armv7l.whl", hash = "sha256:182c64ade34c341398bf71ec0975613970feb175090760ab4f51d1e9a5424f05"}, - {file = "grpcio-1.32.0-cp35-cp35m-macosx_10_7_intel.whl", hash = "sha256:9c0d8f2346c842088b8cbe3e14985b36e5191a34bf79279ba321a4bf69bd88b7"}, - {file = "grpcio-1.32.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:4775bc35af9cd3b5033700388deac2e1d611fa45f4a8dcb93667d94cb25f0444"}, - {file = "grpcio-1.32.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:be98e3198ec765d0a1e27f69d760f69374ded8a33b953dcfe790127731f7e690"}, - {file = "grpcio-1.32.0-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:378fe80ec5d9353548eb2a8a43ea03747a80f2e387c4f177f2b3ff6c7d898753"}, - {file = "grpcio-1.32.0-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:f7d508691301027033215d3662dab7e178f54d5cca2329f26a71ae175d94b83f"}, - {file = "grpcio-1.32.0-cp35-cp35m-win32.whl", hash = "sha256:25959a651420dd4a6fd7d3e8dee53f4f5fd8c56336a64963428e78b276389a59"}, - {file = "grpcio-1.32.0-cp35-cp35m-win_amd64.whl", hash = "sha256:ac7028d363d2395f3d755166d0161556a3f99500a5b44890421ccfaaf2aaeb08"}, - {file = "grpcio-1.32.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:c31e8a219650ddae1cd02f5a169e1bffe66a429a8255d3ab29e9363c73003b62"}, - {file = "grpcio-1.32.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e28e4c0d4231beda5dee94808e3a224d85cbaba3cfad05f2192e6f4ec5318053"}, - {file = "grpcio-1.32.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f03dfefa9075dd1c6c5cc27b1285c521434643b09338d8b29e1d6a27b386aa82"}, - {file = "grpcio-1.32.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:c4966d746dccb639ef93f13560acbe9630681c07f2b320b7ec03fe2c8f0a1f15"}, - {file = "grpcio-1.32.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:ec10d5f680b8e95a06f1367d73c5ddcc0ed04a3f38d6e4c9346988fb0cea2ffa"}, - {file = "grpcio-1.32.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:28677f057e2ef11501860a7bc15de12091d40b95dd0fddab3c37ff1542e6b216"}, - {file = "grpcio-1.32.0-cp36-cp36m-win32.whl", hash = "sha256:0f3f09269ffd3fded430cd89ba2397eabbf7e47be93983b25c187cdfebb302a7"}, - {file = "grpcio-1.32.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4396b1d0f388ae875eaf6dc05cdcb612c950fd9355bc34d38b90aaa0665a0d4b"}, - {file 
= "grpcio-1.32.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1ada89326a364a299527c7962e5c362dbae58c67b283fe8383c4d952b26565d5"}, - {file = "grpcio-1.32.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:1d384a61f96a1fc6d5d3e0b62b0a859abc8d4c3f6d16daba51ebf253a3e7df5d"}, - {file = "grpcio-1.32.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e811ce5c387256609d56559d944a974cc6934a8eea8c76e7c86ec388dc06192d"}, - {file = "grpcio-1.32.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:07b430fa68e5eecd78e2ad529ab80f6a234b55fc1b675fe47335ccbf64c6c6c8"}, - {file = "grpcio-1.32.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:0e3edd8cdb71809d2455b9dbff66b4dd3d36c321e64bfa047da5afdfb0db332b"}, - {file = "grpcio-1.32.0-cp37-cp37m-win32.whl", hash = "sha256:6f7947dad606c509d067e5b91a92b250aa0530162ab99e4737090f6b17eb12c4"}, - {file = "grpcio-1.32.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7cda998b7b551503beefc38db9be18c878cfb1596e1418647687575cdefa9273"}, - {file = "grpcio-1.32.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c58825a3d8634cd634d8f869afddd4d5742bdb59d594aea4cea17b8f39269a55"}, - {file = "grpcio-1.32.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:ef9bd7fdfc0a063b4ed0efcab7906df5cae9bbcf79d05c583daa2eba56752b00"}, - {file = "grpcio-1.32.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1ce6f5ff4f4a548c502d5237a071fa617115df58ea4b7bd41dac77c1ab126e9c"}, - {file = "grpcio-1.32.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:f12900be4c3fd2145ba94ab0d80b7c3d71c9e6414cfee2f31b1c20188b5c281f"}, - {file = "grpcio-1.32.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:f53f2dfc8ff9a58a993e414a016c8b21af333955ae83960454ad91798d467c7b"}, - {file = "grpcio-1.32.0-cp38-cp38-win32.whl", hash = "sha256:5bddf9d53c8df70061916c3bfd2f468ccf26c348bb0fb6211531d895ed5e4c72"}, - {file = "grpcio-1.32.0-cp38-cp38-win_amd64.whl", hash = "sha256:14c0f017bfebbc18139551111ac58ecbde11f4bc375b73a53af38927d60308b6"}, - {file = "grpcio-1.32.0.tar.gz", hash = "sha256:01d3046fe980be25796d368f8fc5ff34b7cf5e1444f3789a017a7fe794465639"}, + {file = "grpcio-1.43.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:a4e786a8ee8b30b25d70ee52cda6d1dbba2a8ca2f1208d8e20ed8280774f15c8"}, + {file = "grpcio-1.43.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:af9c3742f6c13575c0d4147a8454da0ff5308c4d9469462ff18402c6416942fe"}, + {file = "grpcio-1.43.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fdac966699707b5554b815acc272d81e619dd0999f187cd52a61aef075f870ee"}, + {file = "grpcio-1.43.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e463b4aa0a6b31cf2e57c4abc1a1b53531a18a570baeed39d8d7b65deb16b7e"}, + {file = "grpcio-1.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f11d05402e0ac3a284443d8a432d3dfc76a6bd3f7b5858cddd75617af2d7bd9b"}, + {file = "grpcio-1.43.0-cp310-cp310-win32.whl", hash = "sha256:c36f418c925a41fccada8f7ae9a3d3e227bfa837ddbfddd3d8b0ac252d12dda9"}, + {file = "grpcio-1.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:772b943f34374744f70236bbbe0afe413ed80f9ae6303503f85e2b421d4bca92"}, + {file = "grpcio-1.43.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:cbc9b83211d905859dcf234ad39d7193ff0f05bfc3269c364fb0d114ee71de59"}, + {file = "grpcio-1.43.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:fb7229fa2a201a0c377ff3283174ec966da8f9fd7ffcc9a92f162d2e7fc9025b"}, + {file = "grpcio-1.43.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:17b75f220ee6923338155b4fcef4c38802b9a57bc57d112c9599a13a03e99f8d"}, + 
{file = "grpcio-1.43.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:6620a5b751b099b3b25553cfc03dfcd873cda06f9bb2ff7e9948ac7090e20f05"}, + {file = "grpcio-1.43.0-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:1898f999383baac5fcdbdef8ea5b1ef204f38dc211014eb6977ac6e55944d738"}, + {file = "grpcio-1.43.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47b6821238d8978014d23b1132713dac6c2d72cbb561cf257608b1673894f90a"}, + {file = "grpcio-1.43.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80398e9fb598060fa41050d1220f5a2440fe74ff082c36dda41ac3215ebb5ddd"}, + {file = "grpcio-1.43.0-cp36-cp36m-win32.whl", hash = "sha256:0110310eff07bb69782f53b7a947490268c4645de559034c43c0a635612e250f"}, + {file = "grpcio-1.43.0-cp36-cp36m-win_amd64.whl", hash = "sha256:45401d00f2ee46bde75618bf33e9df960daa7980e6e0e7328047191918c98504"}, + {file = "grpcio-1.43.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:af78ac55933811e6a25141336b1f2d5e0659c2f568d44d20539b273792563ca7"}, + {file = "grpcio-1.43.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8b2b9dc4d7897566723b77422e11c009a0ebd397966b165b21b89a62891a9fdf"}, + {file = "grpcio-1.43.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:77ef653f966934b3bfdd00e4f2064b68880eb40cf09b0b99edfa5ee22a44f559"}, + {file = "grpcio-1.43.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e95b5d62ec26d0cd0b90c202d73e7cb927c369c3358e027225239a4e354967dc"}, + {file = "grpcio-1.43.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:04239e8f71db832c26bbbedb4537b37550a39d77681d748ab4678e58dd6455d6"}, + {file = "grpcio-1.43.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b4a7152187a49767a47d1413edde2304c96f41f7bc92cc512e230dfd0fba095"}, + {file = "grpcio-1.43.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8cc936a29c65ab39714e1ba67a694c41218f98b6e2a64efb83f04d9abc4386b"}, + {file = "grpcio-1.43.0-cp37-cp37m-win32.whl", hash = "sha256:577e024c8dd5f27cd98ba850bc4e890f07d4b5942e5bc059a3d88843a2f48f66"}, + {file = "grpcio-1.43.0-cp37-cp37m-win_amd64.whl", hash = "sha256:138f57e3445d4a48d9a8a5af1538fdaafaa50a0a3c243f281d8df0edf221dc02"}, + {file = "grpcio-1.43.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:08cf25f2936629db062aeddbb594bd76b3383ab0ede75ef0461a3b0bc3a2c150"}, + {file = "grpcio-1.43.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:01f4b887ed703fe82ebe613e1d2dadea517891725e17e7a6134dcd00352bd28c"}, + {file = "grpcio-1.43.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0aa8285f284338eb68962fe1a830291db06f366ea12f213399b520c062b01f65"}, + {file = "grpcio-1.43.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:0edbfeb6729aa9da33ce7e28fb7703b3754934115454ae45e8cc1db601756fd3"}, + {file = "grpcio-1.43.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:c354017819201053d65212befd1dcb65c2d91b704d8977e696bae79c47cd2f82"}, + {file = "grpcio-1.43.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50cfb7e1067ee5e00b8ab100a6b7ea322d37ec6672c0455106520b5891c4b5f5"}, + {file = "grpcio-1.43.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57f1aeb65ed17dfb2f6cd717cc109910fe395133af7257a9c729c0b9604eac10"}, + {file = "grpcio-1.43.0-cp38-cp38-win32.whl", hash = "sha256:fa26a8bbb3fe57845acb1329ff700d5c7eaf06414c3e15f4cb8923f3a466ef64"}, + {file = "grpcio-1.43.0-cp38-cp38-win_amd64.whl", hash = "sha256:ade8b79a6b6aea68adb9d4bfeba5d647667d842202c5d8f3ba37ac1dc8e5c09c"}, + {file = 
"grpcio-1.43.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:124e718faf96fe44c98b05f3f475076be8b5198bb4c52a13208acf88a8548ba9"}, + {file = "grpcio-1.43.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2f96142d0abc91290a63ba203f01649e498302b1b6007c67bad17f823ecde0cf"}, + {file = "grpcio-1.43.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:31e6e489ccd8f08884b9349a39610982df48535881ec34f05a11c6e6b6ebf9d0"}, + {file = "grpcio-1.43.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:0e731f660e1e68238f56f4ce11156f02fd06dc58bc7834778d42c0081d4ef5ad"}, + {file = "grpcio-1.43.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:1f16725a320460435a8a5339d8b06c4e00d307ab5ad56746af2e22b5f9c50932"}, + {file = "grpcio-1.43.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4b4543e13acb4806917d883d0f70f21ba93b29672ea81f4aaba14821aaf9bb0"}, + {file = "grpcio-1.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:594aaa0469f4fca7773e80d8c27bf1298e7bbce5f6da0f084b07489a708f16ab"}, + {file = "grpcio-1.43.0-cp39-cp39-win32.whl", hash = "sha256:5449ae564349e7a738b8c38583c0aad954b0d5d1dd3cea68953bfc32eaee11e3"}, + {file = "grpcio-1.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:bdf41550815a831384d21a498b20597417fd31bd084deb17d31ceb39ad9acc79"}, + {file = "grpcio-1.43.0.tar.gz", hash = "sha256:735d9a437c262ab039d02defddcb9f8f545d7009ae61c0114e19dda3843febe5"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, ] -idna-ssl = [ - {file = "idna-ssl-1.1.0.tar.gz", hash = "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"}, -] importlib-metadata = [ {file = "importlib_metadata-2.0.0-py2.py3-none-any.whl", hash = "sha256:cefa1a2f919b866c5beb7c9f7b0ebb4061f30a8a9bf16d609b000e2dfaceb9c3"}, {file = "importlib_metadata-2.0.0.tar.gz", hash = "sha256:77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da"}, @@ -1731,20 +1759,39 @@ markupsafe = [ {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, 
{file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b"}, {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-win32.whl", hash = 
"sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8"}, {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] mistune = [ @@ -1755,25 +1802,6 @@ more-itertools = [ {file = "more-itertools-8.5.0.tar.gz", hash = "sha256:6f83822ae94818eae2612063a5101a7311e68ae8002005b5e05f03fd74a86a20"}, {file = "more_itertools-8.5.0-py3-none-any.whl", hash = "sha256:9b30f12df9393f0d28af9210ff8efe48d10c94f73e5daf886f10c4b0b0b4f03c"}, ] -multidict = [ - {file = "multidict-4.7.6-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000"}, - {file = "multidict-4.7.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a"}, - {file = "multidict-4.7.6-cp35-cp35m-win32.whl", hash = "sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5"}, - {file = "multidict-4.7.6-cp35-cp35m-win_amd64.whl", hash = "sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3"}, - {file = "multidict-4.7.6-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87"}, - {file = "multidict-4.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2"}, - {file = "multidict-4.7.6-cp36-cp36m-win32.whl", hash = "sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7"}, - {file = "multidict-4.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463"}, - {file = "multidict-4.7.6-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d"}, - {file = "multidict-4.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255"}, - {file = "multidict-4.7.6-cp37-cp37m-win32.whl", hash = "sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507"}, - {file = "multidict-4.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c"}, - {file = "multidict-4.7.6-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b"}, - {file = "multidict-4.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7"}, - {file = "multidict-4.7.6-cp38-cp38-win32.whl", hash = "sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d"}, - {file = "multidict-4.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19"}, - {file = "multidict-4.7.6.tar.gz", hash = "sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430"}, -] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, @@ -1835,30 +1863,30 @@ packaging = [ {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, ] pandas = [ - {file = "pandas-0.25.3-cp35-cp35m-macosx_10_6_intel.whl", hash = 
"sha256:df8864824b1fe488cf778c3650ee59c3a0d8f42e53707de167ba6b4f7d35f133"}, - {file = "pandas-0.25.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7458c48e3d15b8aaa7d575be60e1e4dd70348efcd9376656b72fecd55c59a4c3"}, - {file = "pandas-0.25.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:61741f5aeb252f39c3031d11405305b6d10ce663c53bc3112705d7ad66c013d0"}, - {file = "pandas-0.25.3-cp35-cp35m-win32.whl", hash = "sha256:adc3d3a3f9e59a38d923e90e20c4922fc62d1e5a03d083440468c6d8f3f1ae0a"}, - {file = "pandas-0.25.3-cp35-cp35m-win_amd64.whl", hash = "sha256:975c461accd14e89d71772e89108a050fa824c0b87a67d34cedf245f6681fc17"}, - {file = "pandas-0.25.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ee50c2142cdcf41995655d499a157d0a812fce55c97d9aad13bc1eef837ed36c"}, - {file = "pandas-0.25.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:4545467a637e0e1393f7d05d61dace89689ad6d6f66f267f86fff737b702cce9"}, - {file = "pandas-0.25.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bbe3eb765a0b1e578833d243e2814b60c825b7fdbf4cdfe8e8aae8a08ed56ecf"}, - {file = "pandas-0.25.3-cp36-cp36m-win32.whl", hash = "sha256:8153705d6545fd9eb6dd2bc79301bff08825d2e2f716d5dced48daafc2d0b81f"}, - {file = "pandas-0.25.3-cp36-cp36m-win_amd64.whl", hash = "sha256:26382aab9c119735908d94d2c5c08020a4a0a82969b7e5eefb92f902b3b30ad7"}, - {file = "pandas-0.25.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:00dff3a8e337f5ed7ad295d98a31821d3d0fe7792da82d78d7fd79b89c03ea9d"}, - {file = "pandas-0.25.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e45055c30a608076e31a9fcd780a956ed3b1fa20db61561b8d88b79259f526f7"}, - {file = "pandas-0.25.3-cp37-cp37m-win32.whl", hash = "sha256:255920e63850dc512ce356233081098554d641ba99c3767dde9e9f35630f994b"}, - {file = "pandas-0.25.3-cp37-cp37m-win_amd64.whl", hash = "sha256:22361b1597c8c2ffd697aa9bf85423afa9e1fcfa6b1ea821054a244d5f24d75e"}, - {file = "pandas-0.25.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9962957a27bfb70ab64103d0a7b42fa59c642fb4ed4cb75d0227b7bb9228535d"}, - {file = "pandas-0.25.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:78bf638993219311377ce9836b3dc05f627a666d0dbc8cec37c0ff3c9ada673b"}, - {file = "pandas-0.25.3-cp38-cp38-win32.whl", hash = "sha256:6a3ac2c87e4e32a969921d1428525f09462770c349147aa8e9ab95f88c71ec71"}, - {file = "pandas-0.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:33970f4cacdd9a0ddb8f21e151bfb9f178afb7c36eb7c25b9094c02876f385c2"}, - {file = "pandas-0.25.3.tar.gz", hash = "sha256:52da74df8a9c9a103af0a72c9d5fdc8e0183a90884278db7f386b5692a2220a4"}, + {file = "pandas-1.2.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c601c6fdebc729df4438ec1f62275d6136a0dd14d332fc0e8ce3f7d2aadb4dd6"}, + {file = "pandas-1.2.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8d4c74177c26aadcfb4fd1de6c1c43c2bf822b3e0fc7a9b409eeaf84b3e92aaa"}, + {file = "pandas-1.2.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:b730add5267f873b3383c18cac4df2527ac4f0f0eed1c6cf37fcb437e25cf558"}, + {file = "pandas-1.2.4-cp37-cp37m-win32.whl", hash = "sha256:2cb7e8f4f152f27dc93f30b5c7a98f6c748601ea65da359af734dd0cf3fa733f"}, + {file = "pandas-1.2.4-cp37-cp37m-win_amd64.whl", hash = "sha256:2111c25e69fa9365ba80bbf4f959400054b2771ac5d041ed19415a8b488dc70a"}, + {file = "pandas-1.2.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:167693a80abc8eb28051fbd184c1b7afd13ce2c727a5af47b048f1ea3afefff4"}, + {file = "pandas-1.2.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:612add929bf3ba9d27b436cc8853f5acc337242d6b584203f207e364bb46cb12"}, + {file = 
"pandas-1.2.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:971e2a414fce20cc5331fe791153513d076814d30a60cd7348466943e6e909e4"}, + {file = "pandas-1.2.4-cp38-cp38-win32.whl", hash = "sha256:68d7baa80c74aaacbed597265ca2308f017859123231542ff8a5266d489e1858"}, + {file = "pandas-1.2.4-cp38-cp38-win_amd64.whl", hash = "sha256:bd659c11a4578af740782288cac141a322057a2e36920016e0fc7b25c5a4b686"}, + {file = "pandas-1.2.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9db70ffa8b280bb4de83f9739d514cd0735825e79eef3a61d312420b9f16b758"}, + {file = "pandas-1.2.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:298f0553fd3ba8e002c4070a723a59cdb28eda579f3e243bc2ee397773f5398b"}, + {file = "pandas-1.2.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52d2472acbb8a56819a87aafdb8b5b6d2b3386e15c95bde56b281882529a7ded"}, + {file = "pandas-1.2.4-cp39-cp39-win32.whl", hash = "sha256:d0877407359811f7b853b548a614aacd7dea83b0c0c84620a9a643f180060950"}, + {file = "pandas-1.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:2b063d41803b6a19703b845609c0b700913593de067b552a8b24dd8eeb8c9895"}, + {file = "pandas-1.2.4.tar.gz", hash = "sha256:649ecab692fade3cbfcf967ff936496b0cfba0af00a55dfaacd82bdda5cb2279"}, ] pandas-gbq = [ {file = "pandas-gbq-0.13.2.tar.gz", hash = "sha256:bc3a615c92df76490be33ae1efe60e82b8affd196e39f4f6c9c76d67c32e3d46"}, {file = "pandas_gbq-0.13.2-py3-none-any.whl", hash = "sha256:858539f397dcadf1e2792db49ac9a010098009f47859252d4aeaf03d530ea290"}, ] +pandavro = [ + {file = "pandavro-1.6.0.tar.gz", hash = "sha256:d098da34529fbb20de5fd1a6f231918d1b60941b25bea5dc87897ef0d472cb6f"}, +] pandocfilters = [ {file = "pandocfilters-1.4.2.tar.gz", hash = "sha256:b3dd70e169bb5449e6bc6ff96aea89c5eea8c5f6ab5e207fc2f521a2cf4a0da9"}, ] @@ -1890,25 +1918,37 @@ prompt-toolkit = [ {file = "prompt_toolkit-3.0.3-py3-none-any.whl", hash = "sha256:c93e53af97f630f12f5f62a3274e79527936ed466f038953dfa379d4941f651a"}, {file = "prompt_toolkit-3.0.3.tar.gz", hash = "sha256:a402e9bf468b63314e37460b68ba68243d55b2f8c4d0192f85a019af3945050e"}, ] +proto-plus = [ + {file = "proto-plus-1.19.9.tar.gz", hash = "sha256:4ca4055f7c5c1a2239ac7a12770a76a16269f58d3f01631523c20fc81dbb14a7"}, + {file = "proto_plus-1.19.9-py3-none-any.whl", hash = "sha256:b21e901cee2fd27f63d7997f7f1d8c149804d59314803ebd491905da48251b91"}, +] protobuf = [ - {file = "protobuf-3.13.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9c2e63c1743cba12737169c447374fab3dfeb18111a460a8c1a000e35836b18c"}, - {file = "protobuf-3.13.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1e834076dfef9e585815757a2c7e4560c7ccc5962b9d09f831214c693a91b463"}, - {file = "protobuf-3.13.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:df3932e1834a64b46ebc262e951cd82c3cf0fa936a154f0a42231140d8237060"}, - {file = "protobuf-3.13.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:8c35bcbed1c0d29b127c886790e9d37e845ffc2725cc1db4bd06d70f4e8359f4"}, - {file = "protobuf-3.13.0-cp35-cp35m-win32.whl", hash = "sha256:339c3a003e3c797bc84499fa32e0aac83c768e67b3de4a5d7a5a9aa3b0da634c"}, - {file = "protobuf-3.13.0-cp35-cp35m-win_amd64.whl", hash = "sha256:361acd76f0ad38c6e38f14d08775514fbd241316cce08deb2ce914c7dfa1184a"}, - {file = "protobuf-3.13.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9edfdc679a3669988ec55a989ff62449f670dfa7018df6ad7f04e8dbacb10630"}, - {file = "protobuf-3.13.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5db9d3e12b6ede5e601b8d8684a7f9d90581882925c96acf8495957b4f1b204b"}, - {file = "protobuf-3.13.0-cp36-cp36m-win32.whl", hash = 
"sha256:c8abd7605185836f6f11f97b21200f8a864f9cb078a193fe3c9e235711d3ff1e"}, - {file = "protobuf-3.13.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4d1174c9ed303070ad59553f435846a2f877598f59f9afc1b89757bdf846f2a7"}, - {file = "protobuf-3.13.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0bba42f439bf45c0f600c3c5993666fcb88e8441d011fad80a11df6f324eef33"}, - {file = "protobuf-3.13.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c0c5ab9c4b1eac0a9b838f1e46038c3175a95b0f2d944385884af72876bd6bc7"}, - {file = "protobuf-3.13.0-cp37-cp37m-win32.whl", hash = "sha256:f68eb9d03c7d84bd01c790948320b768de8559761897763731294e3bc316decb"}, - {file = "protobuf-3.13.0-cp37-cp37m-win_amd64.whl", hash = "sha256:91c2d897da84c62816e2f473ece60ebfeab024a16c1751aaf31100127ccd93ec"}, - {file = "protobuf-3.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3dee442884a18c16d023e52e32dd34a8930a889e511af493f6dc7d4d9bf12e4f"}, - {file = "protobuf-3.13.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:e7662437ca1e0c51b93cadb988f9b353fa6b8013c0385d63a70c8a77d84da5f9"}, - {file = "protobuf-3.13.0-py2.py3-none-any.whl", hash = "sha256:d69697acac76d9f250ab745b46c725edf3e98ac24763990b24d58c16c642947a"}, - {file = "protobuf-3.13.0.tar.gz", hash = "sha256:6a82e0c8bb2bf58f606040cc5814e07715b2094caeba281e2e7d0b0e2e397db5"}, + {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, + {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, + {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, + {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, + {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, + {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, + {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, + {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, + {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, + {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, + {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, + {file = 
"protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, + {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, + {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, + {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, + {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, + {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, + {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, + {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, ] ptyprocess = [ {file = "ptyprocess-0.6.0-py2.py3-none-any.whl", hash = "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f"}, @@ -1922,6 +1962,44 @@ pyaml = [ {file = "pyaml-20.4.0-py2.py3-none-any.whl", hash = "sha256:67081749a82b72c45e5f7f812ee3a14a03b3f5c25ff36ec3b290514f8c4c4b99"}, {file = "pyaml-20.4.0.tar.gz", hash = "sha256:29a5c2a68660a799103d6949167bd6c7953d031449d08802386372de1db6ad71"}, ] +pyarrow = [ + {file = "pyarrow-6.0.0-cp310-cp310-macosx_10_13_universal2.whl", hash = "sha256:c7a6e7e0bf8779e9c3428ced85507541f3da9a0675e2f4781d4eb2c7042cbf81"}, + {file = "pyarrow-6.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:7a683f71b848eb6310b4ec48c0def55dac839e9994c1ac874c9b2d3d5625def1"}, + {file = "pyarrow-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5144bd9db2920c7cb566c96462d62443cc239104f94771d110f74393f2fb42a2"}, + {file = "pyarrow-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed0be080cf595ea15ff1c9ff4097bbf1fcc4b50847d98c0a3c0412fbc6ede7e9"}, + {file = "pyarrow-6.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:072c1a0fca4509eefd7d018b78542fb7e5c63aaf5698f1c0a6e45628ae17ba44"}, + {file = "pyarrow-6.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5bed4f948c032c40597302e9bdfa65f62295240306976ecbe43a54924c6f94f"}, + {file = "pyarrow-6.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:465f87fa0be0b2928b2beeba22b5813a0203fb05d90fd8563eea48e08ecc030e"}, + {file = "pyarrow-6.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:ddf2e6e3b321adaaf716f2d5af8e92d205a9671e0cb7c0779710a567fd1dd580"}, + {file = "pyarrow-6.0.0-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:0204e80777ab8f4e9abd3a765a8ec07ed1e3c4630bacda50d2ce212ef0f3826f"}, + {file = "pyarrow-6.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:82fe80309e01acf29e3943a1f6d3c98ec109fe1d356bc1ac37d639bcaadcf684"}, + {file = 
"pyarrow-6.0.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:281ce5fa03621d786a9beb514abb09846db7f0221b50eabf543caa24037eaacd"}, + {file = "pyarrow-6.0.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5408fa8d623e66a0445f3fb0e4027fd219bf99bfb57422d543d7b7876e2c5b55"}, + {file = "pyarrow-6.0.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a19e58dfb04e451cd8b7bdec3ac8848373b95dfc53492c9a69789aa9074a3c1b"}, + {file = "pyarrow-6.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:b86d175262db1eb46afdceb36d459409eb6f8e532d3dec162f8bf572c7f57623"}, + {file = "pyarrow-6.0.0-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:2d2c681659396c745e4f1988d5dd41dcc3ad557bb8d4a8c2e44030edafc08a91"}, + {file = "pyarrow-6.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c666bc6a1cebf01206e2dc1ab05f25f39f35d3a499e0ef5cd635225e07306ca"}, + {file = "pyarrow-6.0.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8d41dfb09ba9236cca6245f33088eb42f3c54023da281139241e0f9f3b4b754e"}, + {file = "pyarrow-6.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c746ef42c039348a288584800e299456c80c5691401bb9b19aa9c02a427b7"}, + {file = "pyarrow-6.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c38263ea438a1666b13372e7565450cfeec32dbcd1c2595749476a58465eaec"}, + {file = "pyarrow-6.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e81508239a71943759cee272ce625ae208092dd36ef2c6713fccee30bbcf52bb"}, + {file = "pyarrow-6.0.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:a50d2f77b86af38ceabf45617208b9105d20e7a5eebc584e7c8c0acededd82ce"}, + {file = "pyarrow-6.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbda7595f24a639bcef3419ecfac17216efacb09f7b0f1b4c4c97f900d65ca0e"}, + {file = "pyarrow-6.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bf3400780c4d3c9cb43b1e8a1aaf2e1b7199a0572d0a645529d2784e4d0d8497"}, + {file = "pyarrow-6.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:15dc0d673d3f865ca63c877bd7a2eced70b0a08969fb733a28247134b8a1f18b"}, + {file = "pyarrow-6.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1d9a2f4ee812ed0bd4182cabef99ea914ac297274f0de086f2488093d284ef"}, + {file = "pyarrow-6.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d046dc78a9337baa6415be915c5a16222505233e238a1017f368243c89817eea"}, + {file = "pyarrow-6.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:ea64a48a85c631eb2a0ea13ccdec5143c85b5897836b16331ee4289d27a57247"}, + {file = "pyarrow-6.0.0-cp39-cp39-macosx_10_13_universal2.whl", hash = "sha256:cc1d4a70efd583befe92d4ea6f74ed2e0aa31ccdde767cd5cae8e77c65a1c2d4"}, + {file = "pyarrow-6.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:004185e0babc6f3c3fba6ba4f106e406a0113d0f82bb9ad9a8571a1978c45d04"}, + {file = "pyarrow-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8c23f8cdecd3d9e49f9b0f9a651ae5549d1d32fd4901fb1bdc2d327edfba844f"}, + {file = "pyarrow-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb701ec4a94b92102606d4e88f0b8eba34f09a5ad8e014eaa4af76f42b7f62ae"}, + {file = "pyarrow-6.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da7860688c33ca88ac05f1a487d32d96d9caa091412496c35f3d1d832145675a"}, + {file = "pyarrow-6.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac941a147d14993987cc8b605b721735a34b3e54d167302501fb4db1ad7382c7"}, + {file = 
"pyarrow-6.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6163d82cca7541774b00503c295fe86a1722820eddb958b57f091bb6f5b0a6db"}, + {file = "pyarrow-6.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:376c4b5f248ae63df21fe15c194e9013753164be2d38f4b3fb8bde63ac5a1958"}, + {file = "pyarrow-6.0.0.tar.gz", hash = "sha256:5be62679201c441356d3f2a739895dcc8d4d299f2a6eabcd2163bfb6a898abba"}, +] pyasn1 = [ {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, @@ -1979,6 +2057,10 @@ python-dateutil = [ {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, ] +python-slugify = [ + {file = "python-slugify-5.0.2.tar.gz", hash = "sha256:f13383a0b9fcbe649a1892b9c8eb4f8eab1d6d84b84bb7a624317afa98159cab"}, + {file = "python_slugify-5.0.2-py2.py3-none-any.whl", hash = "sha256:6d8c5df75cd4a7c3a2d21e257633de53f52ab0265cd2d1dc62a730e8194a7380"}, +] pytz = [ {file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"}, {file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"}, @@ -2020,6 +2102,8 @@ pyyaml = [ {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, + {file = "PyYAML-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a"}, + {file = "PyYAML-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e"}, {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, ] pyzmq = [ @@ -2048,6 +2132,10 @@ pyzmq = [ {file = "pyzmq-19.0.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:654d3e06a4edc566b416c10293064732516cf8871a4522e0a2ba00cc2a2e600c"}, {file = "pyzmq-19.0.2-cp38-cp38-win32.whl", hash = "sha256:276ad604bffd70992a386a84bea34883e696a6b22e7378053e5d3227321d9702"}, {file = "pyzmq-19.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:09d24a80ccb8cbda1af6ed8eb26b005b6743e58e9290566d2a6841f4e31fa8e0"}, + {file = "pyzmq-19.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:18189fc59ff5bf46b7ccf5a65c1963326dbfc85a2bc73e9f4a90a40322b992c8"}, + {file = "pyzmq-19.0.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b1dd4cf4c5e09cbeef0aee83f3b8af1e9986c086a8927b261c042655607571e8"}, + {file = "pyzmq-19.0.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c6d653bab76b3925c65d4ac2ddbdffe09710f3f41cc7f177299e8c4498adb04a"}, + {file = "pyzmq-19.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:949a219493a861c263b75a16588eadeeeab08f372e25ff4a15a00f73dfe341f4"}, {file = "pyzmq-19.0.2-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:c1a31cd42905b405530e92bdb70a8a56f048c8a371728b8acf9d746ecd4482c0"}, {file = "pyzmq-19.0.2-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:a7e7f930039ee0c4c26e4dfee015f20bd6919cd8b97c9cd7afbde2923a5167b6"}, {file = "pyzmq-19.0.2.tar.gz", hash = "sha256:296540a065c8c21b26d63e3cea2d1d57902373b16e4256afe46422691903a438"}, @@ -2081,6 +2169,12 @@ regex = [ {file = "regex-2020.9.27-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:8d69cef61fa50c8133382e61fd97439de1ae623fe943578e477e76a9d9471637"}, {file = "regex-2020.9.27-cp38-cp38-win32.whl", hash = "sha256:f2388013e68e750eaa16ccbea62d4130180c26abb1d8e5d584b9baf69672b30f"}, {file = "regex-2020.9.27-cp38-cp38-win_amd64.whl", hash = "sha256:4318d56bccfe7d43e5addb272406ade7a2274da4b70eb15922a071c58ab0108c"}, + {file = "regex-2020.9.27-cp39-cp39-manylinux1_i686.whl", hash = "sha256:84cada8effefe9a9f53f9b0d2ba9b7b6f5edf8d2155f9fdbe34616e06ececf81"}, + {file = "regex-2020.9.27-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:816064fc915796ea1f26966163f6845de5af78923dfcecf6551e095f00983650"}, + {file = "regex-2020.9.27-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:5d892a4f1c999834eaa3c32bc9e8b976c5825116cde553928c4c8e7e48ebda67"}, + {file = "regex-2020.9.27-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c9443124c67b1515e4fe0bb0aa18df640965e1030f468a2a5dc2589b26d130ad"}, + {file = "regex-2020.9.27-cp39-cp39-win32.whl", hash = "sha256:49f23ebd5ac073765ecbcf046edc10d63dcab2f4ae2bce160982cb30df0c0302"}, + {file = "regex-2020.9.27-cp39-cp39-win_amd64.whl", hash = "sha256:3d20024a70b97b4f9546696cbf2fd30bae5f42229fbddf8661261b1eaff0deb7"}, {file = "regex-2020.9.27.tar.gz", hash = "sha256:a6f32aea4260dfe0e55dc9733ea162ea38f0ea86aa7d0f77b15beac5bf7b369d"}, ] requests = [ @@ -2096,6 +2190,37 @@ rsa = [ {file = "rsa-4.6-py3-none-any.whl", hash = "sha256:6166864e23d6b5195a5cfed6cd9fed0fe774e226d8f854fcb23b7bbef0350233"}, {file = "rsa-4.6.tar.gz", hash = "sha256:109ea5a66744dd859bf16fe904b8d8b627adafb9408753161e766a92e7d681fa"}, ] +"ruamel.yaml" = [ + {file = "ruamel.yaml-0.17.10-py3-none-any.whl", hash = "sha256:ffb9b703853e9e8b7861606dfdab1026cf02505bade0653d1880f4b2db47f815"}, + {file = "ruamel.yaml-0.17.10.tar.gz", hash = "sha256:106bc8d6dc6a0ff7c9196a47570432036f41d556b779c6b4e618085f57e39e67"}, +] +"ruamel.yaml.clib" = [ + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, + {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, +] send2trash = [ {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, @@ -2112,6 +2237,10 @@ testpath = [ {file = "testpath-0.4.4-py2.py3-none-any.whl", hash = "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"}, {file = "testpath-0.4.4.tar.gz", hash = "sha256:60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e"}, ] +text-unidecode = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] toml = [ {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, @@ -2131,6 +2260,10 @@ tornado = [ {file = 
"tornado-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c58d56003daf1b616336781b26d184023ea4af13ae143d9dda65e31e534940b9"}, {file = "tornado-6.0.4.tar.gz", hash = "sha256:0fe2d45ba43b00a41cd73f8be321a44936dc1aba233dee979f17a042b83eb6dc"}, ] +tqdm = [ + {file = "tqdm-4.50.2-py2.py3-none-any.whl", hash = "sha256:43ca183da3367578ebf2f1c2e3111d51ea161ed1dc4e6345b86e27c2a93beff7"}, + {file = "tqdm-4.50.2.tar.gz", hash = "sha256:69dfa6714dee976e2425a9aab84b622675b7b1742873041e3db8a8e86132a4af"}, +] traitlets = [ {file = "traitlets-4.3.3-py2.py3-none-any.whl", hash = "sha256:70b4c6a1d9019d7b4f6846832288f86998aa3b9207c6821f3578a6a6a467fe44"}, {file = "traitlets-4.3.3.tar.gz", hash = "sha256:d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7"}, @@ -2143,19 +2276,28 @@ typed-ast = [ {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, + {file = "typed_ast-1.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fcf135e17cc74dbfbc05894ebca928ffeb23d9790b3167a674921db19082401f"}, {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, + {file = "typed_ast-1.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f208eb7aff048f6bea9586e61af041ddf7f9ade7caed625742af423f6bae3298"}, {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, + {file = "typed_ast-1.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7e4c9d7658aaa1fc80018593abdf8598bf91325af6af5cce4ce7c73bc45ea53d"}, {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, + {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:92c325624e304ebf0e025d1224b77dd4e6393f18aab8d829b5b7e04afe9b7a2c"}, + {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d648b8e3bf2fe648745c8ffcee3db3ff903d0817a01a12dd6a6ea7a8f4889072"}, + {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fac11badff8313e23717f3dada86a15389d0708275bddf766cca67a84ead3e91"}, + {file = "typed_ast-1.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0d8110d78a5736e16e26213114a38ca35cb15b6515d535413b090bd50951556d"}, + {file = "typed_ast-1.4.1-cp39-cp39-win32.whl", hash = "sha256:b52ccf7cfe4ce2a1064b18594381bccf4179c2ecf7f513134ec2f993dd4ab395"}, + {file = "typed_ast-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:3742b32cf1c6ef124d57f95be609c473d7ec4c14d0090e5a5e05a15269fb4d0c"}, {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, ] typing-extensions = [ @@ -2179,25 +2321,6 @@ widgetsnbextension = [ {file = "widgetsnbextension-3.5.1-py2.py3-none-any.whl", hash = "sha256:bd314f8ceb488571a5ffea6cc5b9fc6cba0adaf88a9d2386b93a489751938bcd"}, {file = "widgetsnbextension-3.5.1.tar.gz", hash = "sha256:079f87d87270bce047512400efd70238820751a11d2d8cb137a5a5bdbaf255c7"}, ] -yarl = [ - {file = "yarl-1.6.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:db9eb8307219d7e09b33bcb43287222ef35cbcf1586ba9472b0a4b833666ada1"}, - {file = "yarl-1.6.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:e31fef4e7b68184545c3d68baec7074532e077bd1906b040ecfba659737df188"}, - {file = "yarl-1.6.0-cp35-cp35m-win32.whl", hash = "sha256:5d84cc36981eb5a8533be79d6c43454c8e6a39ee3118ceaadbd3c029ab2ee580"}, - {file = "yarl-1.6.0-cp35-cp35m-win_amd64.whl", hash = "sha256:5e447e7f3780f44f890360ea973418025e8c0cdcd7d6a1b221d952600fd945dc"}, - {file = "yarl-1.6.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6f6898429ec3c4cfbef12907047136fd7b9e81a6ee9f105b45505e633427330a"}, - {file = "yarl-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d088ea9319e49273f25b1c96a3763bf19a882cff774d1792ae6fba34bd40550a"}, - {file = "yarl-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b7c199d2cbaf892ba0f91ed36d12ff41ecd0dde46cbf64ff4bfe997a3ebc925e"}, - {file = "yarl-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:67c5ea0970da882eaf9efcf65b66792557c526f8e55f752194eff8ec722c75c2"}, - {file = "yarl-1.6.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:04a54f126a0732af75e5edc9addeaa2113e2ca7c6fce8974a63549a70a25e50e"}, - {file = "yarl-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fcbe419805c9b20db9a51d33b942feddbf6e7fb468cb20686fd7089d4164c12a"}, - {file = "yarl-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:c604998ab8115db802cc55cb1b91619b2831a6128a62ca7eea577fc8ea4d3131"}, - {file = "yarl-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c22607421f49c0cb6ff3ed593a49b6a99c6ffdeaaa6c944cdda83c2393c8864d"}, - {file = "yarl-1.6.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:7ce35944e8e61927a8f4eb78f5bc5d1e6da6d40eadd77e3f79d4e9399e263921"}, - {file = "yarl-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c15d71a640fb1f8e98a1423f9c64d7f1f6a3a168f803042eaf3a5b5022fde0c1"}, - {file = "yarl-1.6.0-cp38-cp38-win32.whl", hash = "sha256:3cc860d72ed989f3b1f3abbd6ecf38e412de722fb38b8f1b1a086315cf0d69c5"}, - {file = "yarl-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:e32f0fb443afcfe7f01f95172b66f279938fbc6bdaebe294b0ff6747fb6db020"}, - {file = "yarl-1.6.0.tar.gz", hash = "sha256:61d3ea3c175fe45f1498af868879c6ffeb989d4143ac542163c45538ba5ec21b"}, -] zipp = [ {file = 
"zipp-3.2.0-py3-none-any.whl", hash = "sha256:43f4fa8d8bb313e65d8323a3952ef8756bf40f9a5c3ea7334be23ee4ec8278b6"}, {file = "zipp-3.2.0.tar.gz", hash = "sha256:b52f22895f4cfce194bc8172f3819ee8de7540aa6d873535a8668b730b8b411f"}, diff --git a/python-package/pyproject.toml b/python-package/pyproject.toml index ebb6e9ec1..3d4ec8d53 100644 --- a/python-package/pyproject.toml +++ b/python-package/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -authors = ["Joao Carabetta ", "Ricardo Dahis","Diego Oliveira"] +authors = ["Joao Carabetta ", "Ricardo Dahis", "Diego Oliveira"] classifiers = [ "Topic :: Software Development :: Build Tools", "Topic :: Software Development :: Libraries :: Python Modules", @@ -13,43 +13,44 @@ packages = [ ] readme = "README.md" repository = "https://github.com/base-dos-dados/bases" -version = "1.6.1-beta.2" +version = "1.6.2" [tool.poetry.scripts] basedosdados = 'basedosdados.cli.cli:cli' [tool.poetry.dependencies] -Jinja2 = "2.11.2" +Jinja2 = "3.0.3" +ckanapi = "4.6" click = "8.0.3" google-cloud-bigquery = "2.30.1" google-cloud-bigquery-storage = "1.1.0" google-cloud-storage = "1.42.3" pandas-gbq = "0.13.2" pyaml = "20.4.0" -python = ">=3.7.1" +python = ">=3.7.1,<3.11" tomlkit = "0.7.0" tqdm = "4.50.2" pandas= "1.2.4" -ckanapi = "4.6" 'ruamel.yaml'= "0.17.10" pyarrow = "6.0.0" +pandavro = "^1.6.0" [tool.poetry.dev-dependencies] -Jinja2 = "2.11.2" +Jinja2 = "3.0.3" black = "20.8b1" +ckanapi = "4.6" click = "8.0.3" -google-cloud-bigquery = "1.28.0" +google-cloud-bigquery = "2.30.1" google-cloud-bigquery-storage = "1.1.0" google-cloud-storage = "1.42.3" ipykernel = "5.3.4" jupyter = "1.0.0" +pandas = "1.2.4" pyaml = "20.4.0" pytest = "6.0.2" +'ruamel.yaml' = "0.17.10" tomlkit = "0.7.0" tqdm = "4.50.2" -pandas= "1.2.4" -ckanapi = "4.6" -'ruamel.yaml'= "0.17.10" [tool.black] # Use the more relaxed max line length permitted in PEP8. 
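Beyond the version bump to `1.6.2` and the `python = ">=3.7.1,<3.11"` cap, the pyproject.toml hunk above adds `pandavro`, which backs the avro `source_format` exercised by the new table tests further down. A minimal sketch of producing a sample file like the `municipio.avro` added below, assuming pandavro's documented `to_avro`/`from_avro` helpers and illustrative column names:

```
# Sketch only: pandavro ^1.6.0 as pinned above; the DataFrame contents
# are illustrative, not the real municipio sample data.
import pandas as pd
import pandavro as pdx

df = pd.DataFrame({"id_municipio": ["3550308"], "municipio": ["São Paulo"]})

# Write the DataFrame to Avro, then read it back to verify the round trip.
pdx.to_avro("municipio.avro", df)
assert pdx.from_avro("municipio.avro").shape == df.shape
```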
diff --git a/python-package/tests/sample_data/table/municipio.avro b/python-package/tests/sample_data/table/municipio.avro new file mode 100644 index 000000000..ec917f505 Binary files /dev/null and b/python-package/tests/sample_data/table/municipio.avro differ diff --git a/python-package/tests/sample_data/table/municipio.parquet b/python-package/tests/sample_data/table/municipio.parquet new file mode 100644 index 000000000..c8f1cad46 Binary files /dev/null and b/python-package/tests/sample_data/table/municipio.parquet differ diff --git a/python-package/tests/test_download/test_metadata.py b/python-package/tests/test_download/test_metadata.py index 048fcbd08..d6788de83 100644 --- a/python-package/tests/test_download/test_metadata.py +++ b/python-package/tests/test_download/test_metadata.py @@ -1,9 +1,9 @@ -from os import read import pytest from pathlib import Path import pandas as pd from pandas_gbq.gbq import GenericGBQException import shutil +import requests from basedosdados import ( list_datasets, @@ -12,110 +12,114 @@ get_table_description, get_table_columns, get_table_size, + search, ) +from basedosdados.download.metadata import _safe_fetch TEST_PROJECT_ID = "basedosdados-dev" SAVEFILE = Path(__file__).parent / "tmp_bases" / "test.csv" SAVEPATH = Path(__file__).parent / "tmp_bases" shutil.rmtree(SAVEPATH, ignore_errors=True) +def test_list_datasets_simple_verbose(capsys): -def test_list_datasets(capsys): - - list_datasets(from_file=True) + out = list_datasets( + query="trabalho", limit=10, with_description=False, verbose=True + ) out, err = capsys.readouterr() # Capture prints assert "dataset_id" in out + # check input error + with pytest.raises(ValueError): + search(query="trabalho", order_by="name") + +def test_list_datasets_simple_list(): -def test_list_datasets_complete(capsys): + out = list_datasets(query="", limit=12, with_description=False, verbose=False) + # check if function returns list + assert isinstance(out, list) + assert len(out) == 12 - list_datasets(with_description=True, filter_by="ibge", from_file=True) + +def test_list_datasets_complete_list(): + + out = list_datasets( + query="trabalho", limit=12, with_description=True, verbose=False + ) + # check if function returns list + assert isinstance(out, list) + assert "dataset_id" in out[0].keys() + assert "description" in out[0].keys() + + +def test_list_datasets_complete_verbose(capsys): + + list_datasets(query="trabalho", limit=10, with_description=True, verbose=True) out, err = capsys.readouterr() # Capture prints assert "dataset_id" in out assert "description" in out -def test_list_datasets_all_descriptions(capsys): +def test_list_dataset_tables_simple_verbose(capsys): - list_datasets(with_description=True, from_file=True) + list_dataset_tables(dataset_id="br_me_caged", with_description=False, verbose=True) out, err = capsys.readouterr() # Capture prints - assert len(out) > 0 + assert "table_id" in out -def test_list_datasets_verbose_false(): +def test_list_dataset_tables_simple_list(): + + out = list_dataset_tables( + dataset_id="br_me_caged", with_description=False, verbose=False + ) - out = list_datasets(from_file=True, verbose=False) assert type(out) == list assert len(out) > 0 -def test_list_dataset_tables(capsys): - - list_dataset_tables(dataset_id="br_ibge_censo_demografico", from_file=True) - out, err = capsys.readouterr() # Capture prints - assert "table_id" in out +def test_list_dataset_tables_complete_verbose(capsys): + list_dataset_tables(dataset_id="br_me_caged", with_description=True, verbose=True) 
-def test_list_dataset_tables_complete(capsys): - - list_dataset_tables( - dataset_id="br_ibge_censo_demografico", - filter_by="renda", - with_description=True, - from_file=True, - ) out, err = capsys.readouterr() # Capture prints assert "table_id" in out assert "description" in out - assert "renda" in out -def test_list_dataset_tables_all_descriptions(capsys): - list_dataset_tables( - dataset_id="br_ibge_censo_demografico", with_description=True, from_file=True - ) - out, err = capsys.readouterr() # Capture prints - assert len(out) > 0 - - -def test_list_dataset_tables_verbose_false(): +def test_list_dataset_tables_complete_list(): out = list_dataset_tables( - dataset_id="br_ibge_censo_demografico", from_file=True, verbose=False + dataset_id="br_me_caged", with_description=True, verbose=False ) + assert type(out) == list - assert len(out) > 0 + assert type(out[0]) == dict def test_get_dataset_description(capsys): - get_dataset_description("br_ibge_censo_demografico", from_file=True) + get_dataset_description("br_me_caged", verbose=True) out, err = capsys.readouterr() # Capture prints assert len(out) > 0 def test_get_dataset_description_verbose_false(): - out = get_dataset_description( - "br_ibge_censo_demografico", from_file=True, verbose=False - ) + out = get_dataset_description("br_me_caged", verbose=False) assert type(out) == str assert len(out) > 0 def test_get_table_description(capsys): - get_table_description( - "br_ibge_censo_demografico", "setor_censitario_basico_2010", from_file=True - ) + get_table_description("br_me_caged", "microdados_antigos") out, err = capsys.readouterr() # Capture prints assert len(out) > 0 def test_get_table_description_verbose_false(): out = get_table_description( - dataset_id="br_ibge_censo_demografico", - table_id="setor_censitario_basico_2010", - from_file=True, + dataset_id="br_me_caged", + table_id="microdados_antigos", verbose=False, ) assert type(out) == str @@ -124,46 +128,46 @@ def test_get_table_description_verbose_false(): def test_get_table_columns(capsys): get_table_columns( - dataset_id="br_ibge_censo_demografico", - table_id="setor_censitario_basico_2010", - from_file=True, + dataset_id="br_me_caged", + table_id="microdados_antigos", ) out, err = capsys.readouterr() # Capture prints assert "name" in out - assert "field_type" in out assert "description" in out def test_get_table_columns_verbose_false(): out = get_table_columns( - dataset_id="br_ibge_censo_demografico", - table_id="setor_censitario_basico_2010", - from_file=True, + dataset_id="br_me_caged", + table_id="microdados_antigos", verbose=False, ) assert type(out) == list assert len(out) > 0 +def test_search(): + out = search(query="agua", order_by="score") + # check if function returns pd.DataFrame + assert isinstance(out, pd.DataFrame) + # check that there are no duplicate tables in the result + assert out.id.nunique() == out.shape[0] + # check input error + with pytest.raises(ValueError): + search(query="agua", order_by="name") def test_get_table_size(capsys): get_table_size( dataset_id="br_ibge_censo_demografico", table_id="setor_censitario_basico_2010", - billing_project_id=TEST_PROJECT_ID, - from_file=True, ) out, err = capsys.readouterr() - assert "num_rows" in out - assert "size_mb" in out + assert "not available" in out +def test__safe_fetch(capsys): -def test_get_table_size_verbose_false(): - out = get_table_size( - dataset_id="br_ibge_censo_demografico", - table_id="setor_censitario_basico_2010", - billing_project_id=TEST_PROJECT_ID, - from_file=True, - verbose=False, - ) -
assert type(out) == list - assert len(out) > 0 + _safe_fetch("https://www.lkajsdhgfal.com.br") + out, err = capsys.readouterr() # Capture prints + assert "HTTPSConnection" in out + + response = _safe_fetch("https://basedosdados.org/api/3/action/bd_dataset_search?q=agua&page_size=10&resource_type=bdm_table") + assert type(response.json())==dict diff --git a/python-package/tests/test_table.py b/python-package/tests/test_table.py index d15191448..6c57eb294 100644 --- a/python-package/tests/test_table.py +++ b/python-package/tests/test_table.py @@ -4,6 +4,7 @@ import shutil from google.api_core.exceptions import NotFound +import basedosdados as bd from basedosdados import Dataset, Table, Storage from basedosdados.exceptions import BaseDosDadosException @@ -38,9 +39,18 @@ def sample_data(metadatadir): @pytest.fixture -def data_path(sample_data): +def data_csv_path(sample_data): return sample_data / "municipio.csv" - + + +@pytest.fixture +def data_parquet_path(sample_data): + return sample_data / "municipio.parquet" + + +@pytest.fixture +def data_avro_path(sample_data): + return sample_data / "municipio.avro" def check_files(folder): @@ -52,7 +62,7 @@ def test_init( table, metadatadir, folder, - data_path, + data_csv_path, ): # remove folder @@ -60,36 +70,36 @@ def test_init( Dataset(dataset_id=DATASET_ID, metadata_path=metadatadir).init(replace=True) - table.init(data_sample_path=data_path) + table.init(data_sample_path=data_csv_path) check_files(folder) -def test_init_file_exists_error(table, metadatadir, data_path): +def test_init_file_exists_error(table, metadatadir, data_csv_path): with pytest.raises(FileExistsError): table.init(if_folder_exists="raise", if_table_config_exists="replace") with pytest.raises(FileExistsError): table.init( - data_sample_path=data_path, + data_sample_path=data_csv_path, if_folder_exists="replace", if_table_config_exists="raise", ) -def test_init_not_implemented_error(table, metadatadir, data_path): +def test_init_not_implemented_error(table, metadatadir, data_csv_path): with pytest.raises(NotImplementedError): table.init( if_folder_exists="replace", if_table_config_exists="replace", - data_sample_path=data_path, + data_sample_path=data_csv_path, source_format="json", ) -def test_init_no_path(table, metadatadir, data_path): +def test_init_no_path(table, metadatadir, data_csv_path): with pytest.raises(BaseDosDadosException): table.init(if_folder_exists="replace", if_table_config_exists="replace") @@ -102,7 +112,7 @@ def test_init_no_path(table, metadatadir, data_path): def test_init_if_folder_exists_pass_if_table_config_replace( - table, metadatadir, folder, data_path + table, metadatadir, folder, data_csv_path ): table.init(if_folder_exists="pass", if_table_config_exists="replace") @@ -111,12 +121,12 @@ def test_init_if_folder_exists_pass_if_table_config_replace( def test_init_if_folder_exists_replace_if_table_config_raise( - table, metadatadir, folder, data_path + table, metadatadir, folder, data_csv_path ): shutil.rmtree(table.table_folder) table.init( - data_sample_path=data_path, + data_sample_path=data_csv_path, if_folder_exists="replace", if_table_config_exists="raise", ) @@ -124,11 +134,11 @@ def test_init_if_folder_exists_replace_if_table_config_raise( def test_init_if_folder_exists_replace_if_table_config_exists_replace( - table, metadatadir, folder, data_path + table, metadatadir, folder, data_csv_path ): table.init( - data_sample_path=data_path, + data_sample_path=data_csv_path, if_folder_exists="replace", if_table_config_exists="replace", ) @@ -155,7 
+165,7 @@ def test_delete(table): assert not table_exists(table, mode="prod") -def test_create_no_path_error(table, metadatadir, data_path, sample_data): +def test_create_no_path_error(table, metadatadir, data_csv_path, sample_data): shutil.rmtree(metadatadir / DATASET_ID / TABLE_ID, ignore_errors=True) @@ -168,13 +178,13 @@ def test_create_no_path_error(table, metadatadir, data_path, sample_data): table.create(if_table_config_exists="replace") -def test_create_no_path(table, metadatadir, data_path, sample_data): +def test_create_no_path(table, metadatadir, data_csv_path, sample_data): Storage(dataset_id=DATASET_ID, table_id=TABLE_ID, metadata_path=metadatadir).upload( - data_path, mode="staging", if_exists="replace" + data_csv_path, mode="staging", if_exists="replace" ) - table.init(data_sample_path=data_path, if_folder_exists="replace") + table.init(data_sample_path=data_csv_path, if_folder_exists="replace") for file in TABLE_FILES: shutil.copy(sample_data / file, table.table_folder / file) @@ -184,7 +194,7 @@ def test_create_no_path(table, metadatadir, data_path, sample_data): def test_create_storage_data_exist_table_config_exist( - table, metadatadir, data_path, sample_data + table, metadatadir, data_csv_path, sample_data ): shutil.rmtree(metadatadir / DATASET_ID / TABLE_ID, ignore_errors=True) @@ -192,11 +202,11 @@ def test_create_storage_data_exist_table_config_exist( Dataset(dataset_id=DATASET_ID, metadata_path=metadatadir).create(if_exists="pass") Storage(dataset_id=DATASET_ID, table_id=TABLE_ID, metadata_path=metadatadir).upload( - data_path, mode="staging", if_exists="replace" + data_csv_path, mode="staging", if_exists="replace" ) table.init( - data_sample_path=data_path, + data_sample_path=data_csv_path, if_folder_exists="replace", if_table_config_exists="replace", ) @@ -207,14 +217,14 @@ def test_create_storage_data_exist_table_config_exist( table.delete(mode="all") table.create( - data_path, + data_csv_path, if_storage_data_exists="pass", if_table_config_exists="pass", ) assert table_exists(table, "staging") -def test_create_if_table_exist_replace(table, metadatadir, data_path, sample_data): +def test_create_if_table_exist_replace(table, metadatadir, data_csv_path, sample_data): table.create( if_table_exists="replace", @@ -224,10 +234,48 @@ def test_create_if_table_exist_replace(table, metadatadir, data_path, sample_dat assert table_exists(table, "staging") -def table_create_not_implemented_source_format(table): +def test_table_create_avro_implemented_source_format(data_avro_path): + table = bd.Table("ds_test", "tb_test") + # table.delete(mode="all") + table.create( + path=data_avro_path, + if_table_exists="replace", + if_storage_data_exists="pass", + if_table_config_exists="replace", + source_format="avro", + ) + assert table_exists(table, "staging") + + +def test_table_create_parquet_implemented_source_format(data_parquet_path): + table = bd.Table("ds_test", "tb_test") + # table.delete(mode="all") + table.create( + path=data_parquet_path, + if_table_exists="replace", + if_storage_data_exists="pass", + if_table_config_exists="replace", + source_format="parquet", + ) + assert table_exists(table, "staging") + + +def test_table_create_csv_implemented_source_format(data_csv_path): + table = bd.Table(dataset_id="ds_test", table_id="tb_test") + table.create( + path=data_csv_path, + if_table_exists="replace", + if_storage_data_exists="replace", + if_table_config_exists="replace", + source_format="csv", + ) + assert table_exists(table, "staging") + +def 
test_table_create_not_implemented_source_format(table, data_csv_path): with pytest.raises(NotImplementedError): table.create( + path=data_csv_path, if_table_exists="replace", if_storage_data_exists="pass", if_table_config_exists="pass", @@ -235,7 +283,7 @@ def table_create_not_implemented_source_format(table): ) -def test_create_if_table_exists_pass(table, metadatadir, data_path, sample_data): +def test_create_if_table_exists_pass(table, metadatadir, data_csv_path, sample_data): table.create( if_table_exists="pass", if_storage_data_exists="pass", @@ -245,38 +293,38 @@ def test_create_if_table_exists_pass(table, metadatadir, data_path, sample_data) def test_create_if_storage_data_replace_if_table_config_pass( - table, metadatadir, data_path, sample_data + table, metadatadir, data_csv_path, sample_data ): table.delete("all") table.create( - data_path, + data_csv_path, if_storage_data_exists="replace", if_table_config_exists="pass", ) assert table_exists(table, "staging") -def test_create_if_table_exists_raise(table, metadatadir, data_path): +def test_create_if_table_exists_raise(table, metadatadir, data_csv_path): with pytest.raises(FileExistsError): table.create(if_storage_data_exists="pass", if_table_config_exists="pass") -def test_create_with_path(table, metadatadir, data_path, sample_data): +def test_create_with_path(table, metadatadir, data_csv_path, sample_data): table.delete("all") Storage(DATASET_ID, TABLE_ID).delete_table(not_found_ok=True) shutil.rmtree(metadatadir / DATASET_ID / TABLE_ID, ignore_errors=True) table.create( - data_path, + data_csv_path, ) assert table_exists(table, mode="staging") def test_create_if_storage_data_pass_if_table_config_pass( - table, metadatadir, data_path, sample_data + table, metadatadir, data_csv_path, sample_data ): for file in TABLE_FILES: shutil.copy(sample_data / file, table.table_folder / file) @@ -291,58 +339,58 @@ def test_create_if_storage_data_pass_if_table_config_pass( def test_create_if_storage_data_pass_if_table_config_replace( - table, metadatadir, data_path, sample_data + table, metadatadir, data_csv_path, sample_data ): table.delete("all") table.create( - data_path, if_storage_data_exists="pass", if_table_config_exists="replace" + data_csv_path, if_storage_data_exists="pass", if_table_config_exists="replace" ) assert table_exists(table, mode="staging") -def test_create_if_folder_exists_raise(table, metadatadir, data_path, sample_data): +def test_create_if_folder_exists_raise(table, metadatadir, data_csv_path, sample_data): with pytest.raises(FileExistsError): - table.create(data_path, if_table_exists="pass", if_storage_data_exists="pass") + table.create(data_csv_path, if_table_exists="pass", if_storage_data_exists="pass") -def test_create_with_upload(table, metadatadir, data_path): +def test_create_with_upload(table, metadatadir, data_csv_path): table.delete("all") Storage(DATASET_ID, TABLE_ID).delete_table(not_found_ok=True) - table.create(data_path, if_table_config_exists="replace") + table.create(data_csv_path, if_table_config_exists="replace") assert table_exists(table, mode="staging") def test_create_if_storage_data_replace_if_table_config_replace( - table, metadatadir, data_path + table, metadatadir, data_csv_path ): table.delete("all") table.create( - data_path, if_storage_data_exists="replace", if_table_config_exists="replace" + data_csv_path, if_storage_data_exists="replace", if_table_config_exists="replace" ) assert table_exists(table, mode="staging") -def test_create_if_storage_data_raise(table, metadatadir, data_path): 
+def test_create_if_storage_data_raise(table, metadatadir, data_csv_path): Storage(dataset_id=DATASET_ID, table_id=TABLE_ID, metadata_path=metadatadir).upload( - data_path, mode="staging", if_exists="replace" + data_csv_path, mode="staging", if_exists="replace" ) with pytest.raises(Exception): table.create( - data_path, + data_csv_path, if_table_exists="replace", if_table_config_exists="replace", if_storage_data_exists="raise", ) -def test_create_auto_partitions(metadatadir, data_path, sample_data): +def test_create_auto_partitions(metadatadir, data_csv_path, sample_data): shutil.rmtree(metadatadir / "partitions", ignore_errors=True) table_part = Table( @@ -354,7 +402,7 @@ def test_create_auto_partitions(metadatadir, data_path, sample_data): table_part.delete("all") table_part.init( - data_sample_path=data_path, + data_sample_path=data_csv_path, if_folder_exists="replace", if_table_config_exists="replace", ) @@ -372,7 +420,7 @@ def test_create_auto_partitions(metadatadir, data_path, sample_data): for n in [1, 2]: Path(metadatadir / "partitions" / f"keys={n}").mkdir() shutil.copy( - data_path, + data_csv_path, metadatadir / "partitions" / f"keys={n}" / "municipio.csv", ) @@ -421,10 +469,10 @@ def test_update_raises(metadatadir, sample_data, capsys): assert "table_config.yaml" in out -def test_update(table, metadatadir, data_path): +def test_update(table, metadatadir, data_csv_path): table.create( - data_path, + data_csv_path, if_table_exists="pass", if_storage_data_exists="pass", if_table_config_exists="pass", @@ -437,7 +485,7 @@ def test_update(table, metadatadir, data_path): table.update(mode="all") -def test_publish(table, metadatadir, sample_data, data_path): +def test_publish(table, metadatadir, sample_data, data_csv_path): table.delete("all") shutil.copy( @@ -451,7 +499,7 @@ def test_publish(table, metadatadir, sample_data, data_path): ) table.create( - data_path, + data_csv_path, if_table_exists="replace", if_storage_data_exists="replace", if_table_config_exists="pass", @@ -462,9 +510,9 @@ def test_publish(table, metadatadir, sample_data, data_path): assert table_exists(table, "prod") -def test_append(table, metadatadir, data_path): +def test_append(table, metadatadir, data_csv_path): shutil.copy( - data_path, + data_csv_path, metadatadir / "municipio2.csv", )
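Taken together, the renamed `data_*_path` fixtures and the new `test_table_create_*` cases document the intended workflow end to end. A hedged usage sketch mirroring those tests (the `ds_test`/`tb_test` ids, a local `municipio.parquet`, and configured GCP credentials are assumptions):

```
# Mirrors the new tests; assumes basedosdados 1.6.2 is installed and
# GCP credentials are configured for the create() call.
import basedosdados as bd

# search() returns a pandas DataFrame ranked by "score"; per the tests,
# other order_by values such as "name" raise ValueError.
results = bd.search(query="agua", order_by="score")
assert results.id.nunique() == results.shape[0]  # no duplicate tables

# With verbose=False the listing helpers return Python objects instead
# of printing.
datasets = bd.list_datasets(
    query="trabalho", limit=10, with_description=False, verbose=False
)

# Table.create() now accepts "avro" and "parquet" besides "csv".
table = bd.Table(dataset_id="ds_test", table_id="tb_test")
table.create(
    path="municipio.parquet",  # assumed local sample file
    if_table_exists="replace",
    if_storage_data_exists="replace",
    if_table_config_exists="replace",
    source_format="parquet",
)
```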