Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-file-datalake/assets.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/storage/azure-storage-file-datalake",
"Tag": "python/storage/azure-storage-file-datalake_4ab697f017"
"Tag": "python/storage/azure-storage-file-datalake_3d29de0db8"
Comment thread
nateprewitt marked this conversation as resolved.
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
get_mod_conditions,
get_path_http_headers
)
from ._shared.constants import DEFAULT_MAX_CONCURRENCY
from ._shared.request_handlers import get_length, read_length
from ._shared.response_handlers import return_response_headers
from ._shared.uploads import IterStreamer
Expand Down Expand Up @@ -124,7 +125,7 @@ def _upload_options(
validate_content = kwargs.pop('validate_content', False)
content_settings = kwargs.pop('content_settings', None)
metadata = kwargs.pop('metadata', None)
max_concurrency = kwargs.pop('max_concurrency', 1)
max_concurrency = kwargs.pop('max_concurrency', None) or DEFAULT_MAX_CONCURRENCY
Comment thread
nateprewitt marked this conversation as resolved.
Outdated

kwargs['properties'] = add_metadata_headers(metadata)
kwargs['lease_access_conditions'] = get_access_conditions(kwargs.pop('lease', None))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,6 @@
# Suffix appended to an AAD audience/resource to form a token scope.
DEFAULT_OAUTH_SCOPE = "/.default"
# Full OAuth scope used when requesting tokens for Azure Storage.
STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default"

# Fallback parallelism for chunked transfers when callers pass
# max_concurrency=None (1 means sequential transfer, no worker pool).
DEFAULT_MAX_CONCURRENCY = 1

# DNS suffix for Azure public-cloud storage endpoints.
SERVICE_HOST_BASE = "core.windows.net"
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from azure.core.tracing.common import with_current_context

from . import encode_base64, url_quote
from .constants import DEFAULT_MAX_CONCURRENCY
from .request_handlers import get_length
from .response_handlers import return_response_headers

Expand Down Expand Up @@ -52,6 +53,7 @@ def upload_data_chunks(
**kwargs,
):

max_concurrency = max_concurrency or DEFAULT_MAX_CONCURRENCY
Comment thread
nateprewitt marked this conversation as resolved.
Outdated
parallel = max_concurrency > 1
if parallel and "modified_access_conditions" in kwargs:
# Access conditions do not work with parallelism
Expand Down Expand Up @@ -92,6 +94,7 @@ def upload_substream_blocks(
progress_hook=None,
**kwargs,
):
max_concurrency = max_concurrency or DEFAULT_MAX_CONCURRENCY
Comment thread
nateprewitt marked this conversation as resolved.
Outdated
parallel = max_concurrency > 1
if parallel and "modified_access_conditions" in kwargs:
# Access conditions do not work with parallelism
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from typing import AsyncGenerator, Union

from . import encode_base64, url_quote
from .constants import DEFAULT_MAX_CONCURRENCY
from .request_handlers import get_length
from .response_handlers import return_response_headers
from .uploads import SubStream, IterStreamer # pylint: disable=unused-import
Expand Down Expand Up @@ -69,6 +70,7 @@ async def upload_data_chunks(
**kwargs,
):

max_concurrency = max_concurrency or DEFAULT_MAX_CONCURRENCY
Comment thread
nateprewitt marked this conversation as resolved.
Outdated
parallel = max_concurrency > 1
if parallel and "modified_access_conditions" in kwargs:
# Access conditions do not work with parallelism
Expand Down Expand Up @@ -115,6 +117,7 @@ async def upload_substream_blocks(
progress_hook=None,
**kwargs,
):
max_concurrency = max_concurrency or DEFAULT_MAX_CONCURRENCY
Comment thread
nateprewitt marked this conversation as resolved.
Outdated
parallel = max_concurrency > 1
if parallel and "modified_access_conditions" in kwargs:
# Access conditions do not work with parallelism
Expand Down
39 changes: 39 additions & 0 deletions sdk/storage/azure-storage-file-datalake/tests/test_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -692,6 +692,27 @@ def test_upload_data_to_existing_file_with_permission_and_umask(self, **kwargs):
assert data == downloaded_data
assert prop['permissions'] == 'rwxrwxrwx'

@DataLakePreparer()
@recorded_by_proxy
def test_upload_data_with_none_max_concurrency(self, **kwargs):
    """upload_data must accept max_concurrency=None (falling back to the
    default) instead of raising TypeError, and still round-trip the data."""
    account_name = kwargs.pop("datalake_storage_account_name")
    account_key = kwargs.pop("datalake_storage_account_key")
    self._setUp(account_name, account_key)

    # Arrange: a fresh directory with one file client under it.
    directory_client = self.dsc.get_directory_client(
        self.file_system_name, self._get_directory_reference())
    directory_client.create_directory()
    file_client = directory_client.get_file_client('filename')
    payload = self.get_random_bytes(100)

    # Act: None must not raise TypeError.
    file_client.upload_data(payload, overwrite=True, max_concurrency=None)

    # Assert: downloaded content matches what was uploaded.
    assert file_client.download_file().readall() == payload

@DataLakePreparer()
@recorded_by_proxy
def test_read_file(self, **kwargs):
Expand All @@ -710,6 +731,24 @@ def test_read_file(self, **kwargs):
downloaded_data = file_client.download_file().readall()
assert data == downloaded_data

@DataLakePreparer()
@recorded_by_proxy
def test_read_file_with_none_max_concurrency(self, **kwargs):
    """download_file must accept max_concurrency=None (falling back to the
    default) instead of raising TypeError, and still return the full content."""
    account_name = kwargs.pop("datalake_storage_account_name")
    account_key = kwargs.pop("datalake_storage_account_key")
    self._setUp(account_name, account_key)

    # Arrange: write 1 KiB of random bytes to a fresh file.
    file_client = self._create_file_and_return_client()
    payload = self.get_random_bytes(1024)
    file_client.append_data(payload, 0, len(payload))
    file_client.flush_data(len(payload))

    # Act: None must not raise TypeError.
    result = file_client.download_file(max_concurrency=None).readall()

    # Assert: downloaded content matches what was uploaded.
    assert result == payload

@pytest.mark.live_test_only
@DataLakePreparer()
def test_read_file_with_user_delegation_key(self, **kwargs):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -727,6 +727,27 @@ async def data_generator():
result = await (await file_client.download_file()).readall()
assert result == data*3

@DataLakePreparer()
@recorded_by_proxy_async
async def test_upload_data_with_none_max_concurrency(self, **kwargs):
    """Async upload_data must accept max_concurrency=None (falling back to
    the default) instead of raising TypeError, and still round-trip the data."""
    account_name = kwargs.pop("datalake_storage_account_name")
    account_key = kwargs.pop("datalake_storage_account_key")
    await self._setUp(account_name, account_key)

    # Arrange: a fresh directory with one file client under it.
    directory_client = self.dsc.get_directory_client(
        self.file_system_name, self._get_directory_reference())
    await directory_client.create_directory()
    file_client = directory_client.get_file_client('filename')
    payload = self.get_random_bytes(100)

    # Act: None must not raise TypeError.
    await file_client.upload_data(payload, overwrite=True, max_concurrency=None)

    # Assert: downloaded content matches what was uploaded.
    result = await (await file_client.download_file()).readall()
    assert result == payload

@DataLakePreparer()
@recorded_by_proxy_async
async def test_read_file(self, **kwargs):
Expand All @@ -745,6 +766,24 @@ async def test_read_file(self, **kwargs):
downloaded_data = await (await file_client.download_file()).readall()
assert data == downloaded_data

@DataLakePreparer()
@recorded_by_proxy_async
async def test_read_file_with_none_max_concurrency(self, **kwargs):
    """Async download_file must accept max_concurrency=None (falling back to
    the default) instead of raising TypeError, and return the full content."""
    account_name = kwargs.pop("datalake_storage_account_name")
    account_key = kwargs.pop("datalake_storage_account_key")
    await self._setUp(account_name, account_key)

    # Arrange: write 1 KiB of random bytes to a fresh file.
    file_client = await self._create_file_and_return_client()
    payload = self.get_random_bytes(1024)
    await file_client.append_data(payload, 0, len(payload))
    await file_client.flush_data(len(payload))

    # Act: None must not raise TypeError.
    result = await (await file_client.download_file(max_concurrency=None)).readall()

    # Assert: downloaded content matches what was uploaded.
    assert result == payload

@pytest.mark.live_test_only
@DataLakePreparer()
async def test_read_file_with_user_delegation_key(self, **kwargs):
Expand Down