[AI-CognitiveServices] Migrate packages from setup.py to pyproject.toml #46650
Build #20260504.14 had test failures
Details
- Failed: 73 (0.17%)
- Passed: 34,355 (81.12%)
- Other: 7,924 (18.71%)
- Total: 42,352
Annotations
Check failure on line 2393 in Build log
azure-pipelines / python - pullrequest
Build log #L2393
The process '/mnt/vss/_work/1/s/venv/bin/python' failed with exit code 2
Check failure on line 83554 in Build log
azure-pipelines / python - pullrequest
Build log #L83554
There are one or more test failures detected in result files. Detailed summary of published test results can be viewed in the Tests tab.
Check failure on line 1417 in Build log
azure-pipelines / python - pullrequest
Build log #L1417
The process '/mnt/vss/_work/1/s/venv/bin/python' failed with exit code 1
Check failure on line 83553 in Build log
azure-pipelines / python - pullrequest
Build log #L83553
There are one or more test failures detected in result files. Detailed summary of published test results can be viewed in the Tests tab.
Check failure on line 1 in test_party_operations
azure-pipelines / python - pullrequest
test_party_operations
AssertionError: assert 'Sanitized' == 'test-party-39574'
- test-party-39574
+ Sanitized
Raw output
args = (<test_farm_hierarchy.TestFarmHierarchy object at 0x7f9393542650>,)
kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'agrifood_endpoint': 'https://fakeaccount.farmbeats.azure.net'}
trimmed_kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'agrifood_endpoint': 'https://fakeaccount.farmbeats.azure.net'}
test_id = 'sdk/agrifood/azure-agrifood-farming/tests/recordings/test_farm_hierarchy.pyTestFarmHierarchytest_party_operations'
variables = {}
make_combined_call = <function _make_proxy_decorator.<locals>._decorator.<locals>.record_wrap.<locals>.make_combined_call at 0x7f93910b0790>
test_variables = None, test_run = False
originals = [(<class 'azure.core.pipeline.transport._requests_basic.RequestsTransport'>, 'send', <function RequestsTransport.send at 0x7f9392eb8ca0>)]
owner = <class 'azure.core.pipeline.transport._requests_basic.RequestsTransport'>
name = 'send', original = <function RequestsTransport.send at 0x7f9392eb8ca0>
def record_wrap(*args, **kwargs):
# ---- your existing trimming/early-exit logic ----
trimmed_kwargs = {k: v for k, v in kwargs.items()}
trim_kwargs_from_test_function(test_func, trimmed_kwargs)
if is_live_and_not_recording():
return test_func(*args, **trimmed_kwargs)
test_id = get_test_id()
recording_id, variables = start_record_or_playback(test_id)
# Build a wrapper factory so each patched method closes over its own original
def make_combined_call(original_transport_func, is_httpx=False):
def combined_call(*call_args, **call_kwargs):
if is_httpx:
adjusted_args, adjusted_kwargs = _transform_httpx_args(recording_id, *call_args, **call_kwargs)
result = original_transport_func(*adjusted_args, **adjusted_kwargs)
restore_httpx_response_url(result)
else:
adjusted_args, adjusted_kwargs = _transform_args(recording_id, *call_args, **call_kwargs)
result = original_transport_func(*adjusted_args, **adjusted_kwargs)
# rewrite request.url to the original upstream for LROs, etc.
parsed_result = url_parse.urlparse(result.request.url)
upstream_uri = url_parse.urlparse(result.request.headers["x-recording-upstream-base-uri"])
upstream_uri_dict = {"scheme": upstream_uri.scheme, "netloc": upstream_uri.netloc}
original_target = parsed_result._replace(**upstream_uri_dict).geturl()
result.request.url = original_target
return result
return combined_call
# Patch multiple transports and ensure restoration
test_variables = None
test_run = False
originals = []
# monkeypatch all requested transports
for owner, name in transports:
original = getattr(owner, name)
# Check if this is an httpx transport by comparing with httpx transport classes
is_httpx_transport = (
(HTTPXTransport is not None and owner is HTTPXTransport)
or (AsyncHTTPXTransport is not None and owner is AsyncHTTPXTransport)
or (httpx is not None and owner.__module__.startswith("httpx"))
)
setattr(owner, name, make_combined_call(original, is_httpx=is_httpx_transport))
originals.append((owner, name, original))
try:
try:
> test_variables = test_func(*args, variables=variables, **trimmed_kwargs)
../../../.venv/azure-agrifood-farming/.venv_mindependency/lib/python3.10/site-packages/devtools_testutils/proxy_testcase.py:349:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <test_farm_hierarchy.TestFarmHierarchy object at 0x7f9393542650>
kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'va
Check failure on line 1 in test_party_operations
azure-pipelines / python - pullrequest
test_party_operations
AssertionError: assert 'Sanitized' == 'test-party-25486'
- test-party-25486
+ Sanitized
Raw output
args = (<test_farm_hierarchy_async.TestFarmHierarchyAsync object at 0x7f9393543f70>,)
kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'agrifood_endpoint': 'https://fakeaccount.farmbeats.azure.net'}
trimmed_kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'agrifood_endpoint': 'https://fakeaccount.farmbeats.azure.net'}
test_id = 'sdk/agrifood/azure-agrifood-farming/tests/recordings/test_farm_hierarchy_async.pyTestFarmHierarchyAsynctest_party_operations'
variables = {}
make_combined_call = <function _make_proxy_decorator_async.<locals>._decorator.<locals>.record_wrap.<locals>.make_combined_call at 0x7f9391bcdfc0>
test_variables = None, test_run = False
originals = [(<class 'azure.core.pipeline.transport._aiohttp.AioHttpTransport'>, 'send', <function AioHttpTransport.send at 0x7f93921bd1b0>)]
owner = <class 'azure.core.pipeline.transport._aiohttp.AioHttpTransport'>
name = 'send', original = <function AioHttpTransport.send at 0x7f93921bd1b0>
async def record_wrap(*args, **kwargs):
# ---- your existing trimming/early-exit logic ----
trimmed_kwargs = {k: v for k, v in kwargs.items()}
trim_kwargs_from_test_function(test_func, trimmed_kwargs)
if is_live_and_not_recording():
return await test_func(*args, **trimmed_kwargs)
test_id = get_test_id()
recording_id, variables = start_record_or_playback(test_id)
# Build a wrapper factory so each patched method closes over its own original
def make_combined_call(original_transport_func, is_httpx=False):
async def combined_call(*call_args, **call_kwargs):
if is_httpx:
adjusted_args, adjusted_kwargs = _transform_httpx_args(recording_id, *call_args, **call_kwargs)
result = await original_transport_func(*adjusted_args, **adjusted_kwargs)
restore_httpx_response_url(result)
else:
adjusted_args, adjusted_kwargs = _transform_args(recording_id, *call_args, **call_kwargs)
result = await original_transport_func(*adjusted_args, **adjusted_kwargs)
# rewrite request.url to the original upstream for LROs, etc.
parsed_result = url_parse.urlparse(result.request.url)
upstream_uri = url_parse.urlparse(result.request.headers["x-recording-upstream-base-uri"])
upstream_uri_dict = {"scheme": upstream_uri.scheme, "netloc": upstream_uri.netloc}
original_target = parsed_result._replace(**upstream_uri_dict).geturl()
result.request.url = original_target
return result
return combined_call
# Patch multiple transports and ensure restoration
test_variables = None
test_run = False
originals = []
# monkeypatch all requested transports
for owner, name in transports:
original = getattr(owner, name)
# Check if this is an httpx transport by comparing with httpx transport classes
is_httpx_transport = (AsyncHTTPXTransport is not None and owner is AsyncHTTPXTransport) or (
httpx is not None and owner.__module__.startswith("httpx")
)
setattr(owner, name, make_combined_call(original, is_httpx=is_httpx_transport))
originals.append((owner, name, original))
try:
try:
> test_variables = await test_func(*args, variables=variables, **trimmed_kwargs)
../../../.venv/azure-agrifood-farming/.venv_mindependency/lib/python3.10/site-packages/devtools_testutils/aio/proxy_testcase_async.py:148:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <test_farm_hierarchy_async.TestFarmHierarchyAsync object at 0x7f9393543f70>
kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'variables': {}}
agrifood_endp
Check failure on line 1 in test_satellite_flow
azure-pipelines / python - pullrequest
test_satellite_flow
azure.core.exceptions.HttpResponseError: Operation returned an invalid status 'Not Found'
Content: {"Message":"Unable to find a record for the request GET https://Sanitized.com/\nUri doesn't match:\n request <https://Sanitized.com/>\n record <https://Sanitized.microsoftonline.com/72f988bf-86f1-41af-91ab-2d7cd011db47/v2.0/.well-known/openid-configuration>\nHeader differences:\nBody differences:\nRemaining Entries:\n0: https://Sanitized.microsoftonline.com/72f988bf-86f1-41af-91ab-2d7cd011db47/v2.0/.well-known/openid-configuration\n1: https://Sanitized.microsoftonline.com/common/discovery/instance?api-version=1.1&authorization_endpoint=https://Sanitized.microsoftonline.com/common/oauth2/authorize\n2: https://Sanitized.microsoftonline.com/72f988bf-86f1-41af-91ab-2d7cd011db47/oauth2/v2.0/token\n3: https://Sanitized.farmbeats.azure.net/scenes/satellite/ingest-data/satellite-flow-asdfjob-47453?api-version=2022-11-01-preview\n4: https://Sanitized.farmbeats.azure.net/scenes?provider=Microsoft&partyId=test-party-39735&boundaryId=satellite-flow-asdftest-boundary&source=Sentinel_2_L2A&startDateTime=2020-01-01T00%3A00%3A00.000Z&endDateTime=2020-01-31T00%3A00%3A00.000Z&maxCloudCoveragePercentage=100.0&maxDarkPixelCoveragePercentage=100.0&api-version=2022-11-01-preview\n5: https://Sanitized.farmbeats.azure.net/scenes/downloadFiles?filePath=Microsoft%2FSentinel_2_L2A%2Ftest-party-39735%2Fsatellite-flow-asdftest-boundary%2F2020-01-30%2F00-00-00%2Fndvi_10.tif&api-version=2022-11-01-preview\n","Status":"NotFound"}
Raw output
self = <azure.core.polling.base_polling.LROBasePolling object at 0x7f9392c26770>
def run(self):
try:
> self._poll()
../../../.venv/azure-agrifood-farming/.venv_mindependency/lib/python3.10/site-packages/azure/core/polling/base_polling.py:517:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../.venv/azure-agrifood-farming/.venv_mindependency/lib/python3.10/site-packages/azure/core/polling/base_polling.py:561: in _poll
_raise_if_bad_http_status_and_method(self._pipeline_response.http_response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
response = <HttpResponse: 404 Not Found, Content-Type: application/json;>
def _raise_if_bad_http_status_and_method(response):
# type: (ResponseType) -> None
"""Check response status code is valid.
Must be 200, 201, 202, or 204.
:raises: BadStatus if invalid status.
"""
code = response.status_code
if code in {200, 201, 202, 204}:
return
> raise BadStatus(
"Invalid return status {!r} for {!r} operation".format(
code, response.request.method
)
)
E azure.core.polling.base_polling.BadStatus: Invalid return status 404 for 'GET' operation
../../../.venv/azure-agrifood-farming/.venv_mindependency/lib/python3.10/site-packages/azure/core/polling/base_polling.py:114: BadStatus
During handling of the above exception, another exception occurred:
args = (<test_satellite_flow.TestFarmBeatsSatelliteJob object at 0x7f939107ff40>,)
kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'agrifood_endpoint': 'https://fakeaccount.farmbeats.azure.net'}
trimmed_kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'agrifood_endpoint': 'https://fakeaccount.farmbeats.azure.net'}
test_id = 'sdk/agrifood/azure-agrifood-farming/tests/recordings/test_satellite_flow.pyTestFarmBeatsSatelliteJobtest_satellite_flow'
variables = {}
make_combined_call = <function _make_proxy_decorator.<locals>._decorator.<locals>.record_wrap.<locals>.make_combined_call at 0x7f93925e79a0>
test_variables = None, test_run = False
originals = [(<class 'azure.core.pipeline.transport._requests_basic.RequestsTransport'>, 'send', <function RequestsTransport.send at 0x7f9392eb8ca0>)]
owner = <class 'azure.core.pipeline.transport._requests_basic.RequestsTransport'>
name = 'send', original = <function RequestsTransport.send at 0x7f9392eb8ca0>
def record_wrap(*args, **kwargs):
# ---- your existing trimming/early-exit logic ----
trimmed_kwargs = {k: v for k, v in kwargs.items()}
trim_kwargs_from_test_function(test_func, trimmed_kwargs)
if is_live_and_not_recording():
return test_func(*args, **trimmed_kwargs)
test_id = get_test_id()
recording_id, variables = start_record_or_playback(test_id)
# Build a wrapper factory so each patched method closes over its own original
def make_combined_call(original_transport_func, is_httpx=False):
def combined_call(*call_args, **call_kwargs):
if is_httpx:
adjusted_args, adjusted_kwargs = _transform_httpx_args(recording_id, *call_args, **call_kwargs)
result = original_transport_func(*adjusted_args, **adjusted_kwargs)
restore_httpx_response_url(result)
else:
adjusted_args, adjusted_kwargs = _transform_args(recording_id, *call_args, **call_kwargs)
result = original_transport_func(*adjusted_args, **adjusted_kwargs)
# rewrite request.url to the original upstream for LROs, etc.
parsed_result = url_parse.urlparse(result.request.url)
upstream_uri = url_parse.urlparse(result.request.headers["x-recording-upstream-base-uri"])
upstream_uri_dict = {"sche
Check failure on line 1 in test_satellite_flow
azure-pipelines / python - pullrequest
test_satellite_flow
azure.core.exceptions.HttpResponseError: Operation returned an invalid status 'Not Found'
Content: {"Message":"Unable to find a record for the request GET https://Sanitized.com/\nUri doesn't match:\n request <https://Sanitized.com/>\n record <https://Sanitized.farmbeats.azure.net/scenes/satellite/ingest-data/satellite-flow-asdfjob-47465?api-version=2022-11-01-preview>\nHeader differences:\nBody differences:\nRemaining Entries:\n0: https://Sanitized.microsoftonline.com/72f988bf-86f1-41af-91ab-2d7cd011db47/oauth2/v2.0/token\n1: https://Sanitized.farmbeats.azure.net/scenes/satellite/ingest-data/satellite-flow-asdfjob-47465?api-version=2022-11-01-preview\n2: https://Sanitized.farmbeats.azure.net/scenes?provider=Microsoft&partyId=test-party-29476&boundaryId=satellite-flow-asdftest-boundary&source=Sentinel_2_L2A&startDateTime=2020-01-01T00:00:00.000Z&endDateTime=2020-01-31T00:00:00.000Z&maxCloudCoveragePercentage=100.0&maxDarkPixelCoveragePercentage=100.0&api-version=2022-11-01-preview\n3: https://Sanitized.farmbeats.azure.net/scenes/downloadFiles?filePath=Microsoft/Sentinel_2_L2A/test-party-29476/satellite-flow-asdftest-boundary/2020-01-30/00-00-00/ndvi_10.tif&api-version=2022-11-01-preview\n","Status":"NotFound"}
Raw output
self = <azure.core.polling.async_base_polling.AsyncLROBasePolling object at 0x7f9392220040>
async def run(self): # pylint:disable=invalid-overridden-method
try:
> await self._poll()
../../../.venv/azure-agrifood-farming/.venv_mindependency/lib/python3.10/site-packages/azure/core/polling/async_base_polling.py:46:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../.venv/azure-agrifood-farming/.venv_mindependency/lib/python3.10/site-packages/azure/core/polling/async_base_polling.py:90: in _poll
_raise_if_bad_http_status_and_method(self._pipeline_response.http_response)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
response = <AsyncHttpResponse: 404 Not Found, Content-Type: application/json;>
def _raise_if_bad_http_status_and_method(response):
# type: (ResponseType) -> None
"""Check response status code is valid.
Must be 200, 201, 202, or 204.
:raises: BadStatus if invalid status.
"""
code = response.status_code
if code in {200, 201, 202, 204}:
return
> raise BadStatus(
"Invalid return status {!r} for {!r} operation".format(
code, response.request.method
)
)
E azure.core.polling.base_polling.BadStatus: Invalid return status 404 for 'GET' operation
../../../.venv/azure-agrifood-farming/.venv_mindependency/lib/python3.10/site-packages/azure/core/polling/base_polling.py:114: BadStatus
During handling of the above exception, another exception occurred:
args = (<test_satellite_flow_async.TestFarmBeatsSatelliteJob object at 0x7f9393042620>,)
kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'agrifood_endpoint': 'https://fakeaccount.farmbeats.azure.net'}
trimmed_kwargs = {'__aggregate_cache_key': ('EnvironmentVariableLoader',), 'agrifood_endpoint': 'https://fakeaccount.farmbeats.azure.net'}
test_id = 'sdk/agrifood/azure-agrifood-farming/tests/recordings/test_satellite_flow_async.pyTestFarmBeatsSatelliteJobtest_satellite_flow'
variables = {}
make_combined_call = <function _make_proxy_decorator_async.<locals>._decorator.<locals>.record_wrap.<locals>.make_combined_call at 0x7f9391b6dfc0>
test_variables = None, test_run = False
originals = [(<class 'azure.core.pipeline.transport._aiohttp.AioHttpTransport'>, 'send', <function AioHttpTransport.send at 0x7f93921bd1b0>)]
owner = <class 'azure.core.pipeline.transport._aiohttp.AioHttpTransport'>
name = 'send', original = <function AioHttpTransport.send at 0x7f93921bd1b0>
async def record_wrap(*args, **kwargs):
# ---- your existing trimming/early-exit logic ----
trimmed_kwargs = {k: v for k, v in kwargs.items()}
trim_kwargs_from_test_function(test_func, trimmed_kwargs)
if is_live_and_not_recording():
return await test_func(*args, **trimmed_kwargs)
test_id = get_test_id()
recording_id, variables = start_record_or_playback(test_id)
# Build a wrapper factory so each patched method closes over its own original
def make_combined_call(original_transport_func, is_httpx=False):
async def combined_call(*call_args, **call_kwargs):
if is_httpx:
adjusted_args, adjusted_kwargs = _transform_httpx_args(recording_id, *call_args, **call_kwargs)
result = await original_transport_func(*adjusted_args, **adjusted_kwargs)
restore_httpx_response_url(result)
else:
adjusted_args, adjusted_kwargs = _transform_args(recording_id, *call_args, **call_kwargs)
result = await original_transport_func(*adjusted_args, **adjusted_kwargs)
# rewrite request.url to the original upstream for LROs, etc.
parsed_result = url_parse.urlparse(result.request.url)
upstream_uri = url_parse.