Skip to content

Commit 7b874e8

Browse files
committed
pass credentials, not cred file (fixes broken "cannot pickle" error)
1 parent d4bb6a5 commit 7b874e8

File tree

1 file changed

+12
-10
lines changed

1 file changed

+12
-10
lines changed

dask_bigquery/core.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -13,12 +13,13 @@
1313
from google.api_core.gapic_v1 import client_info as grpc_client_info
1414
from google.cloud import bigquery, bigquery_storage
1515
from google.oauth2 import service_account
16+
from google.oauth2.service_account import Credentials
1617

1718
import dask_bigquery
1819

1920

2021
@contextmanager
21-
def bigquery_clients(project_id, cred_fpath):
22+
def bigquery_clients(project_id, credentials):
2223
"""This context manager is a temporary solution until there is an
2324
upstream solution to handle this.
2425
See googleapis/google-cloud-python#9457
@@ -31,11 +32,6 @@ def bigquery_clients(project_id, cred_fpath):
3132
user_agent=f"dask-bigquery/{dask_bigquery.__version__}"
3233
)
3334

34-
if cred_fpath:
35-
credentials = service_account.Credentials.from_service_account_file(cred_fpath)
36-
else:
37-
credentials = cred_fpath # if no path set to None to try read default
38-
3935
with bigquery.Client(
4036
project_id, credentials=credentials, client_info=bq_client_info
4137
) as bq_client:
@@ -61,7 +57,7 @@ def _stream_to_dfs(bqs_client, stream_name, schema, read_kwargs):
6157
def bigquery_read(
6258
make_create_read_session_request: callable,
6359
project_id: str,
64-
cred_fpath: str,
60+
credentials: Credentials,
6561
read_kwargs: dict,
6662
stream_name: str,
6763
) -> pd.DataFrame:
@@ -80,7 +76,7 @@ def bigquery_read(
8076
NOTE: Please set if reading from Storage API without any `row_restriction`.
8177
https://cloud.google.com/bigquery/docs/reference/storage/rpc/google.cloud.bigquery.storage.v1beta1#stream
8278
"""
83-
with bigquery_clients(project_id, cred_fpath) as (_, bqs_client):
79+
with bigquery_clients(project_id, credentials) as (_, bqs_client):
8480
session = bqs_client.create_read_session(make_create_read_session_request())
8581
schema = pyarrow.ipc.read_schema(
8682
pyarrow.py_buffer(session.arrow_schema.serialized_schema)
@@ -125,7 +121,13 @@ def read_gbq(
125121
Dask DataFrame
126122
"""
127123
read_kwargs = read_kwargs or {}
128-
with bigquery_clients(project_id, cred_fpath) as (bq_client, bqs_client):
124+
125+
if cred_fpath:
126+
credentials = service_account.Credentials.from_service_account_file(cred_fpath)
127+
else:
128+
credentials = cred_fpath # if no path set to None to try read default
129+
130+
with bigquery_clients(project_id, credentials) as (bq_client, bqs_client):
129131
table_ref = bq_client.get_table(f"{dataset_id}.{table_id}")
130132
if table_ref.table_type == "VIEW":
131133
raise TypeError("Table type VIEW not supported")
@@ -170,7 +172,7 @@ def make_create_read_session_request(row_filter=""):
170172
bigquery_read,
171173
make_create_read_session_request,
172174
project_id,
173-
cred_fpath,
175+
credentials,
174176
read_kwargs,
175177
),
176178
label=label,

0 commit comments

Comments (0)