Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 0 additions & 11 deletions eng/common/pipelines/templates/steps/bypass-local-dns.yml

This file was deleted.

1 change: 0 additions & 1 deletion eng/common/pipelines/templates/steps/verify-agent-os.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,3 @@ steps:
arguments: >
-AgentImage "${{ parameters.AgentImage }}"

- template: /eng/common/pipelines/templates/steps/bypass-local-dns.yml
20 changes: 20 additions & 0 deletions sdk/storage/azure-storage-blob/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,26 @@
### Bugs Fixed
- Fixed an issue where `BlobClient`'s `download_blob` did not retry upon
`ServiceResponseError` and `ServiceResponseTimeoutError` exceptions
- Fixed various issues with configuring logging via `logging_enable` and `logging_body` keywords on a per-request
basis and with retries. Prior to this fix, logging may not have behaved as expected, especially on retries.
- Fixed a potential memory leak caused by improper exception handling that could occur under rare circumstances.

## 12.30.0b1 (2026-04-01)

### Features Added
- Added support for service version 2026-06-06.
- Added support for connection strings and `account_url`s to accept URLs with `-ipv6` and `-dualstack` suffixes
for `BlobServiceClient`, `ContainerClient`, and `BlobClient`.
- Added support for `create` permission in `BlobSasPermissions` for `stage_block`,
`stage_block_from_url`, and `commit_block_list`.
- Added support for a new `Smart` access tier to `StandardBlobTier` used in `BlobClient.set_standard_blob_tier`,
which is optimized to automatically determine the most cost-effective access tier with no performance impact.
When set, `BlobProperties.smart_access_tier` will reveal the service's current access
tier choice between `Hot`, `Cool`, and `Archive`.
- Added support for `is_directory` keyword in `generate_blob_sas` that generates directory-level SAS for blobs.

### Other Changes
- Consolidated the behavior of `max_concurrency=None` by defaulting to the shared `DEFAULT_MAX_CONCURRENCY` constant.

## 12.29.0b1 (2026-01-27)

Expand Down
2 changes: 1 addition & 1 deletion sdk/storage/azure-storage-blob/assets.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "python",
"TagPrefix": "python/storage/azure-storage-blob",
"Tag": "python/storage/azure-storage-blob_bd8f6233a4"
"Tag": "python/storage/azure-storage-blob_339150483d"
}
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ def on_request(self, request: "PipelineRequest") -> None:
class StorageLoggingPolicy(NetworkTraceLoggingPolicy):
"""A policy that logs HTTP request and response to the DEBUG logger.

This accepts both global configuration, and per-request level with "enable_http_logger"
This accepts both global configuration, and per-request level with "logging_enable" and "logging_body"
"""

def __init__(self, logging_enable: bool = False, **kwargs) -> None:
Expand All @@ -198,9 +198,24 @@ def __init__(self, logging_enable: bool = False, **kwargs) -> None:
def on_request(self, request: "PipelineRequest") -> None:
http_request = request.http_request
options = request.context.options
self.logging_body = self.logging_body or options.pop("logging_body", False)
if options.pop("logging_enable", self.enable_http_logger):
request.context["logging_enable"] = True

# Check if logging settings are already determined (from a previous retry attempt)
if "logging_enable" not in request.context:
# First attempt - pop from options and store decision in context
# For logging_enable and logging_body, per-request setting will override the global setting
logging_body = options.pop("logging_body", self.logging_body)
logging_enable = options.pop("logging_enable", self.enable_http_logger)

# Only store in context if logging is enabled to avoid polluting context
if logging_enable:
request.context["logging_enable"] = True
request.context["logging_body"] = logging_body
else:
# Retry attempt - use the settings stored in context from the first attempt
logging_enable = request.context.get("logging_enable", False)
logging_body = request.context.get("logging_body", False)

if logging_enable:
if not _LOGGER.isEnabledFor(logging.DEBUG):
return

Expand All @@ -227,7 +242,7 @@ def on_request(self, request: "PipelineRequest") -> None:
_LOGGER.debug(" %r: %r", header, value)
_LOGGER.debug("Request body:")

if self.logging_body:
if logging_body:
_LOGGER.debug(str(http_request.body))
else:
# We don't want to log the binary data of a file upload.
Expand All @@ -236,7 +251,10 @@ def on_request(self, request: "PipelineRequest") -> None:
_LOGGER.debug("Failed to log request: %r", err)

def on_response(self, request: "PipelineRequest", response: "PipelineResponse") -> None:
if response.context.pop("logging_enable", self.enable_http_logger):
# Logging settings should always be present in context if logging is enabled
# Use .get() instead of .pop() to preserve context values for potential retries
if response.context.get("logging_enable", False):
logging_body = response.context.get("logging_body", False)
if not _LOGGER.isEnabledFor(logging.DEBUG):
return

Expand All @@ -260,9 +278,9 @@ def on_response(self, request: "PipelineRequest", response: "PipelineResponse")
elif resp_content_type.startswith("image"):
_LOGGER.debug("Body contains image data.")

if self.logging_body and resp_content_type.startswith("text"):
if logging_body and resp_content_type.startswith("text"):
_LOGGER.debug(response.http_response.text())
elif self.logging_body:
elif logging_body:
try:
_LOGGER.debug(response.http_response.body())
except ValueError:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -190,11 +190,15 @@ def process_storage_error(storage_error) -> NoReturn: # type: ignore [misc] # p
error.additional_info = additional_data
# error.args is what's surfaced on the traceback - show error message in all cases
error.args = (error.message,)

try:
# `from None` prevents us from double printing the exception (suppresses generated layer error context)
exec("raise error from None") # pylint: disable=exec-used # nosec
except SyntaxError as exc:
raise error from exc
# `from None` suppresses exception chaining to prevent double printing the exception.
raise error from None
finally:
# Explicitly clears exception references to break circular references
# and allow immediate garbage collection.
error = None
storage_error = None


def parse_to_internal_user_delegation_key(service_user_delegation_key):
Expand Down
213 changes: 213 additions & 0 deletions sdk/storage/azure-storage-blob/tests/test_logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,3 +181,216 @@ def test_copy_source_sas_is_scrubbed_off(self, **kwargs):
# the keyword SharedKey is present in the authorization header's value
assert _AUTHORIZATION_HEADER_NAME in log_as_str
assert not 'SharedKey' in log_as_str

@BlobPreparer()
@recorded_by_proxy
def test_logging_body_option_overrides_constructor(self, **kwargs):
    """Verify the per-request ``logging_body``/``logging_enable`` keywords
    override the values supplied to the client constructor, on a
    per-operation basis, for both request and response logging."""
    storage_account_name = kwargs.pop("storage_account_name")
    storage_account_key = kwargs.pop("storage_account_key")

    # Arrange - Create client with logging_body=True in constructor
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        storage_account_key.secret,
        logging_enable=True,
        logging_body=True
    )
    container_name = self.get_resource_name('utcontainer')
    container = bsc.get_container_client(container_name)
    if self.is_live:
        try:
            container.create_container()
        except Exception:
            # Best-effort setup: the container may already exist from a
            # previous run. A bare `except:` would also swallow
            # SystemExit/KeyboardInterrupt, so catch Exception explicitly.
            pass

    request_body = 'testoverridelogging'
    blob_name = self.get_resource_name("testoverride")
    blob_client = container.get_blob_client(blob_name)
    blob_client.upload_blob(request_body, overwrite=True)

    # Act - Download without logging_body option (should use constructor default=True)
    with LogCaptured(self) as log_captured:
        blob_client.download_blob()
        log_as_str = log_captured.getvalue()
        # Assert - Body should be logged
        assert request_body in log_as_str

    # Act - Download with logging_body=False (should override constructor)
    with LogCaptured(self) as log_captured:
        blob_client.download_blob(logging_body=False)
        log_as_str = log_captured.getvalue()
        # Assert - Body should NOT be logged, overriding constructor setting
        assert request_body not in log_as_str

    # Act - Upload with logging_body=False (test request logging override)
    with LogCaptured(self) as log_captured:
        blob_client.upload_blob('uploadtest', overwrite=True, logging_body=False)
        log_as_str = log_captured.getvalue()
        # Assert - Request body should NOT be logged
        assert 'uploadtest' not in log_as_str

    # Act - Upload/Download with logging_enable=False (should override constructor and disable logging entirely)
    with LogCaptured(self) as log_captured:
        blob_client.upload_blob('uploadtest', overwrite=True, logging_enable=False)
        blob_client.download_blob(logging_enable=False)
        log_as_str = log_captured.getvalue()
        # Assert - No logging should occur
        assert log_as_str == ''

@BlobPreparer()
@recorded_by_proxy
def test_logging_body_isolation_between_requests(self, **kwargs):
    """Verify a per-request ``logging_body`` override applies only to that
    request: subsequent requests on the same client revert to the
    constructor default, in both the True->False and False->True directions."""
    storage_account_name = kwargs.pop("storage_account_name")
    storage_account_key = kwargs.pop("storage_account_key")

    # Arrange - Create client with logging_body=True in constructor
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        storage_account_key.secret,
        logging_enable=True,
        logging_body=True
    )
    container_name = self.get_resource_name('utcontainer')
    container = bsc.get_container_client(container_name)
    if self.is_live:
        try:
            container.create_container()
        except Exception:
            # Best-effort setup: the container may already exist from a
            # previous run. A bare `except:` would also swallow
            # SystemExit/KeyboardInterrupt, so catch Exception explicitly.
            pass

    request_body_1 = 'isolationtest1'
    request_body_2 = 'isolationtest2'
    blob_name_1 = self.get_resource_name("testblob1")
    blob_name_2 = self.get_resource_name("testblob2")
    blob_client_1 = container.get_blob_client(blob_name_1)
    blob_client_2 = container.get_blob_client(blob_name_2)
    blob_client_1.upload_blob(request_body_1, overwrite=True)
    blob_client_2.upload_blob(request_body_2, overwrite=True)

    # Act - First request with logging_body=False
    with LogCaptured(self) as log_captured:
        blob_client_1.download_blob(logging_body=False)
        log_as_str = log_captured.getvalue()
        # Assert - Body should NOT be logged
        assert request_body_1 not in log_as_str

    # Act - Second request without logging_body option (should revert to constructor default=True)
    with LogCaptured(self) as log_captured:
        blob_client_2.download_blob()
        log_as_str = log_captured.getvalue()
        # Assert - Body SHOULD be logged, proving previous request's False didn't persist
        assert request_body_2 in log_as_str

    # Act - Third request with logging_body=True
    with LogCaptured(self) as log_captured:
        blob_client_1.download_blob(logging_body=True)
        log_as_str = log_captured.getvalue()
        # Assert - Body should be logged
        assert request_body_1 in log_as_str

    # Act - Fourth request without logging_body option (should still use constructor default=True)
    with LogCaptured(self) as log_captured:
        blob_client_2.download_blob()
        log_as_str = log_captured.getvalue()
        # Assert - Body SHOULD be logged, proving previous request's True didn't change the default
        assert request_body_2 in log_as_str

    # Act - Create new client with logging_body=False and verify isolation works in reverse
    bsc_no_body = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        storage_account_key.secret,
        logging_enable=True,
        logging_body=False
    )
    container_no_body = bsc_no_body.get_container_client(container_name)
    blob_client_no_body = container_no_body.get_blob_client(blob_name_1)

    # Act - Request with logging_body=True
    with LogCaptured(self) as log_captured:
        blob_client_no_body.download_blob(logging_body=True)
        log_as_str = log_captured.getvalue()
        # Assert - Body should be logged
        assert request_body_1 in log_as_str

    # Act - Next request without logging_body option (should revert to constructor default=False)
    with LogCaptured(self) as log_captured:
        blob_client_no_body.download_blob()
        log_as_str = log_captured.getvalue()
        # Assert - Body should NOT be logged, proving previous True didn't persist
        assert request_body_1 not in log_as_str

@BlobPreparer()
@recorded_by_proxy
def test_logging_body_option_on_retry(self, **kwargs):
    """Verify per-request ``logging_body`` settings persist across retry
    attempts: a retried request logs (or suppresses) the body consistently
    on both the original attempt and the retry."""
    storage_account_name = kwargs.pop("storage_account_name")
    storage_account_key = kwargs.pop("storage_account_key")

    # Arrange - Create client with logging enabled and retry configured
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        storage_account_key.secret,
        logging_enable=True,
        retry_total=1,
        initial_backoff=0.1,
        increment_base=0.1,
    )
    container_name = self.get_resource_name('utcontainer')
    container = bsc.get_container_client(container_name)
    if self.is_live:
        try:
            container.create_container()
        except Exception:
            # Best-effort setup: the container may already exist from a
            # previous run. A bare `except:` would also swallow
            # SystemExit/KeyboardInterrupt, so catch Exception explicitly.
            pass

    request_body = 'testretrylogging'
    blob_name = self.get_resource_name("testretry")
    blob_client = container.get_blob_client(blob_name)
    blob_client.upload_blob(request_body, overwrite=True)

    # Test 1: logging_body=False should prevent logging on both original and retry attempts
    call_count = 0
    def response_hook_fail_once(response):
        # Force the first response to look like a 408 so the retry policy
        # fires exactly once; later attempts pass through untouched.
        nonlocal call_count
        call_count += 1
        if call_count == 1:
            response.http_response.status_code = 408  # Request Timeout - triggers retry

    with LogCaptured(self) as log_captured:
        call_count = 0
        blob_client.download_blob(raw_response_hook=response_hook_fail_once, logging_body=False)
        log_as_str = log_captured.getvalue()
        # Assert - Body should NOT be logged on either attempt
        assert request_body not in log_as_str
        assert call_count == 2  # Verify retry happened

    # Test 2: logging_body=True should log on both original and retry attempts
    with LogCaptured(self) as log_captured:
        call_count = 0
        blob_client.download_blob(raw_response_hook=response_hook_fail_once, logging_body=True)
        log_as_str = log_captured.getvalue()
        # Assert - Body should be logged on both attempts
        assert request_body in log_as_str
        assert log_as_str.count(request_body) == 2  # Should appear twice (original + retry)
        assert call_count == 2  # Verify retry happened

    # Test 3: Verify that logging_body override persists correctly across retries
    # even when constructor has logging_body=True
    bsc_with_body = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        storage_account_key.secret,
        logging_enable=True,
        logging_body=True,
        retry_total=1,
        initial_backoff=0.1,
        increment_base=0.1,
    )
    container_with_body = bsc_with_body.get_container_client(container_name)
    blob_client_with_body = container_with_body.get_blob_client(blob_name)

    with LogCaptured(self) as log_captured:
        call_count = 0
        blob_client_with_body.download_blob(raw_response_hook=response_hook_fail_once, logging_body=False)
        log_as_str = log_captured.getvalue()
        # Assert - logging_body=False should override constructor setting on both attempts
        assert request_body not in log_as_str
        assert call_count == 2  # Verify retry happened
Loading