Skip to content

Commit 4750947

Browse files
authored
upgrade all (#47)
1 parent e87b453 commit 4750947

File tree

330 files changed

+147791
-1796
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

330 files changed

+147791
-1796
lines changed

README.rst

+13
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,19 @@ Handles multi-API versions of Azure Storage Data Plane originally from https://g
1717

1818
Change Log
1919
----------
20+
0.7.0
21+
+++++
22+
* blob:
23+
- Support v2020-06-12(12.8.1)
24+
- Support v2020-10-02(12.9.0)
25+
* fileshare:
26+
- Minor fix for 2020-04-08(12.5.0)
27+
- Support v2020-10-02(12.6.0)
28+
* filedatalake:
29+
- Minor fix for 2020-02-10(12.3.1)
30+
- Support v2020-06-12(12.5.0)
31+
* queue: Minor fix for 2018-03-28(12.1.6)
32+
2033
0.6.2
2134
+++++
2235
* Fix import issue for filedatalake
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,233 @@
1+
# -------------------------------------------------------------------------
2+
# Copyright (c) Microsoft Corporation. All rights reserved.
3+
# Licensed under the MIT License. See License.txt in the project root for
4+
# license information.
5+
# --------------------------------------------------------------------------
6+
import os
7+
8+
from typing import Union, Iterable, AnyStr, IO, Any, Dict # pylint: disable=unused-import
9+
from ._version import VERSION
10+
from ._blob_client import BlobClient
11+
from ._container_client import ContainerClient
12+
from ._blob_service_client import BlobServiceClient
13+
from ._lease import BlobLeaseClient
14+
from ._download import StorageStreamDownloader
15+
from ._quick_query_helper import BlobQueryReader
16+
from ._shared_access_signature import generate_account_sas, generate_container_sas, generate_blob_sas
17+
from ._shared.policies import ExponentialRetry, LinearRetry
18+
from ._shared.response_handlers import PartialBatchErrorException
19+
from ._shared.models import(
20+
LocationMode,
21+
ResourceTypes,
22+
AccountSasPermissions,
23+
StorageErrorCode,
24+
UserDelegationKey
25+
)
26+
from ._generated.models import (
27+
RehydratePriority
28+
)
29+
from ._models import (
30+
BlobType,
31+
BlockState,
32+
StandardBlobTier,
33+
PremiumPageBlobTier,
34+
SequenceNumberAction,
35+
PublicAccess,
36+
BlobAnalyticsLogging,
37+
Metrics,
38+
RetentionPolicy,
39+
StaticWebsite,
40+
CorsRule,
41+
ContainerProperties,
42+
BlobProperties,
43+
FilteredBlob,
44+
LeaseProperties,
45+
ContentSettings,
46+
CopyProperties,
47+
BlobBlock,
48+
PageRange,
49+
AccessPolicy,
50+
ContainerSasPermissions,
51+
BlobSasPermissions,
52+
CustomerProvidedEncryptionKey,
53+
ContainerEncryptionScope,
54+
BlobQueryError,
55+
DelimitedJsonDialect,
56+
DelimitedTextDialect,
57+
ArrowDialect,
58+
ArrowType,
59+
ObjectReplicationPolicy,
60+
ObjectReplicationRule
61+
)
62+
from ._list_blobs_helper import BlobPrefix
63+
64+
__version__ = VERSION
65+
66+
67+
def upload_blob_to_url(
        blob_url,  # type: str
        data,  # type: Union[Iterable[AnyStr], IO[AnyStr]]
        credential=None,  # type: Any
        **kwargs):
    # type: (...) -> Dict[str, Any]
    """Upload data to a given URL.

    The data is always uploaded as a block blob.

    :param str blob_url:
        The full URI to the blob. This can also include a SAS token.
    :param data:
        The data to upload. This can be bytes, text, an iterable or a file-like object.
    :type data: bytes or str or Iterable
    :param credential:
        The credentials with which to authenticate. This is optional if the
        blob URL already has a SAS token. The value can be a SAS token string,
        an instance of a AzureSasCredential from azure.core.credentials, an account
        shared access key, or an instance of a TokenCredentials class from azure.identity.
        If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
        - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
    :keyword bool overwrite:
        Whether the blob to be uploaded should overwrite the current data.
        If True, upload_blob_to_url will overwrite any existing data. If set to False, the
        operation will fail with a ResourceExistsError.
    :keyword int max_concurrency:
        The number of parallel connections with which to download.
    :keyword int length:
        Number of bytes to read from the stream. This is optional, but
        should be supplied for optimal performance.
    :keyword dict(str,str) metadata:
        Name-value pairs associated with the blob as metadata.
    :keyword bool validate_content:
        If true, calculates an MD5 hash for each chunk of the blob. The storage
        service checks the hash of the content that has arrived with the hash
        that was sent. This is primarily valuable for detecting bitflips on
        the wire if using http instead of https as https (the default) will
        already validate. Note that this MD5 hash is not stored with the
        blob. Also note that if enabled, the memory-efficient upload algorithm
        will not be used, because computing the MD5 hash requires buffering
        entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
    :keyword str encoding:
        Encoding to use if text is supplied as input. Defaults to UTF-8.
    :returns: Blob-updated property dict (Etag and last modified)
    :rtype: dict(str, Any)
    """
    # Build the client first, then use it as a context manager so its
    # underlying transport is closed when the upload finishes (or fails).
    client = BlobClient.from_blob_url(blob_url, credential=credential)
    with client:
        # All remaining keyword arguments are forwarded untouched to upload_blob.
        return client.upload_blob(data=data, blob_type=BlobType.BlockBlob, **kwargs)
def _download_to_stream(client, handle, **kwargs):
    """Download the blob via *client* and write its bytes into the open *handle*."""
    downloader = client.download_blob(**kwargs)
    downloader.readinto(handle)
def download_blob_from_url(
        blob_url,  # type: str
        output,  # type: str
        credential=None,  # type: Any
        **kwargs):
    # type: (...) -> None
    """Download the contents of a blob to a local file or stream.

    :param str blob_url:
        The full URI to the blob. This can also include a SAS token.
    :param output:
        Where the data should be downloaded to. This could be either a file path to write to,
        or an open IO handle to write to.
    :type output: str or writable stream.
    :param credential:
        The credentials with which to authenticate. This is optional if the
        blob URL already has a SAS token or the blob is public. The value can be a SAS token string,
        an instance of a AzureSasCredential from azure.core.credentials,
        an account shared access key, or an instance of a TokenCredentials class from azure.identity.
        If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
        - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.
    :keyword bool overwrite:
        Whether the local file should be overwritten if it already exists. The default value is
        `False` - in which case a ValueError will be raised if the file already exists. If set to
        `True`, an attempt will be made to write to the existing file. If a stream handle is passed
        in, this value is ignored.
    :keyword int max_concurrency:
        The number of parallel connections with which to download.
    :keyword int offset:
        Start of byte range to use for downloading a section of the blob.
        Must be set if length is provided.
    :keyword int length:
        Number of bytes to read from the stream. This is optional, but
        should be supplied for optimal performance.
    :keyword bool validate_content:
        If true, calculates an MD5 hash for each chunk of the blob. The storage
        service checks the hash of the content that has arrived with the hash
        that was sent. This is primarily valuable for detecting bitflips on
        the wire if using http instead of https as https (the default) will
        already validate. Note that this MD5 hash is not stored with the
        blob. Also note that if enabled, the memory-efficient upload algorithm
        will not be used, because computing the MD5 hash requires buffering
        entire blocks, and doing so defeats the purpose of the memory-efficient algorithm.
    :rtype: None
    """
    # 'overwrite' only applies when output is a path; pop it before the
    # remaining kwargs are forwarded to download_blob.
    overwrite = kwargs.pop('overwrite', False)
    with BlobClient.from_blob_url(blob_url, credential=credential) as client:
        # Anything with a .write attribute is treated as an open stream.
        if hasattr(output, 'write'):
            _download_to_stream(client, output, **kwargs)
            return
        # NOTE(review): the existence check below is separate from the open(),
        # so a file created between the two is silently truncated — confirm
        # whether a race-free exclusive-create open is acceptable here.
        if not overwrite and os.path.isfile(output):
            raise ValueError("The file '{}' already exists.".format(output))
        with open(output, 'wb') as file_handle:
            _download_to_stream(client, file_handle, **kwargs)
# Public API of this package: the names re-exported by ``from ... import *``
# and considered stable for external callers.
__all__ = [
    'upload_blob_to_url',
    'download_blob_from_url',
    'BlobServiceClient',
    'ContainerClient',
    'BlobClient',
    'BlobType',
    'BlobLeaseClient',
    'StorageErrorCode',
    'UserDelegationKey',
    'ExponentialRetry',
    'LinearRetry',
    'LocationMode',
    'BlockState',
    'StandardBlobTier',
    'PremiumPageBlobTier',
    'SequenceNumberAction',
    'PublicAccess',
    'BlobAnalyticsLogging',
    'Metrics',
    'RetentionPolicy',
    'StaticWebsite',
    'CorsRule',
    'ContainerProperties',
    'BlobProperties',
    'BlobPrefix',
    'FilteredBlob',
    'LeaseProperties',
    'ContentSettings',
    'CopyProperties',
    'BlobBlock',
    'PageRange',
    'AccessPolicy',
    'ContainerSasPermissions',
    'BlobSasPermissions',
    'ResourceTypes',
    'AccountSasPermissions',
    'StorageStreamDownloader',
    'CustomerProvidedEncryptionKey',
    'RehydratePriority',
    'generate_account_sas',
    'generate_container_sas',
    'generate_blob_sas',
    'PartialBatchErrorException',
    'ContainerEncryptionScope',
    'BlobQueryError',
    'DelimitedJsonDialect',
    'DelimitedTextDialect',
    'ArrowDialect',
    'ArrowType',
    'BlobQueryReader',
    'ObjectReplicationPolicy',
    'ObjectReplicationRule'
]

0 commit comments

Comments
 (0)