
Scheduled code regeneration test #38417

Draft: wants to merge 2 commits into base: main
135 changes: 120 additions & 15 deletions eng/emitter-package-lock.json

Large diffs are not rendered by default.

26 changes: 14 additions & 12 deletions eng/emitter-package.json
@@ -1,23 +1,25 @@
{
  "main": "dist/src/index.js",
  "dependencies": {
    "@azure-tools/typespec-python": "0.42.2"
    "@azure-tools/typespec-python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@azure-tools/typespec-python/-/typespec-python-0.42.2-alpha.20250408.1.tgz"
  },
  "devDependencies": {
    "@azure-tools/typespec-autorest": "~0.54.0",
    "@azure-tools/typespec-azure-core": "~0.54.0",
    "@azure-tools/typespec-azure-resource-manager": "~0.54.0",
    "@azure-tools/typespec-azure-rulesets": "~0.54.0",
    "@azure-tools/typespec-client-generator-core": "~0.54.0",
    "@typespec/compiler": "^1.0.0-0",
    "@typespec/events": "~0.68.0",
    "@typespec/http": "^1.0.0-0",
    "@typespec/rest": "~0.68.0",
    "@typespec/versioning": "~0.68.0",
    "@typespec/openapi": "^1.0.0-0",
    "@typespec/events": "~0.68.0",
    "@typespec/rest": "~0.68.0",
    "@typespec/sse": "~0.68.0",
    "@typespec/streams": "~0.68.0",
    "@typespec/xml": "~0.68.0",
    "@azure-tools/typespec-azure-core": "~0.54.0",
    "@azure-tools/typespec-azure-resource-manager": "~0.54.0",
    "@azure-tools/typespec-autorest": "~0.54.0",
    "@azure-tools/typespec-azure-rulesets": "~0.54.0",
    "@azure-tools/typespec-client-generator-core": "~0.54.0",
    "@azure-tools/typespec-liftr-base": "0.8.0"
    "@typespec/versioning": "~0.68.0",
    "@typespec/xml": "~0.68.0"
  },
  "overrides": {
    "@autorest/python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@autorest/python/-/python-6.32.2-alpha.20250408.1.tgz"
  }
}
}
2 changes: 1 addition & 1 deletion sdk/batch/azure-batch/MANIFEST.in
@@ -3,4 +3,4 @@ include LICENSE
include azure/batch/py.typed
recursive-include tests *.py
recursive-include samples *.py *.md
include azure/__init__.py
include azure/__init__.py
286 changes: 286 additions & 0 deletions sdk/batch/azure-batch/apiview-properties.json

Large diffs are not rendered by default.

33 changes: 1 addition & 32 deletions sdk/batch/azure-batch/azure/batch/__init__.py
@@ -1,32 +1 @@
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) Python Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
# pylint: disable=wrong-import-position

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from ._patch import * # pylint: disable=unused-wildcard-import

from ._client import BatchClient # type: ignore
from ._version import VERSION

__version__ = VERSION

try:
    from ._patch import __all__ as _patch_all
    from ._patch import *
except ImportError:
    _patch_all = []
from ._patch import patch_sdk as _patch_sdk

__all__ = [
    "BatchClient",
]
__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore

_patch_sdk()
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
55 changes: 27 additions & 28 deletions sdk/batch/azure-batch/azure/batch/_operations/_patch.py
@@ -91,9 +91,7 @@ def create_tasks(

        # deque operations(append/pop) are thread-safe
        results_queue: Deque[_models.BatchTaskAddResult] = collections.deque()
        task_workflow_manager = _TaskWorkflowManager(
            self, job_id=job_id, task_collection=task_collection, **kwargs
        )
        task_workflow_manager = _TaskWorkflowManager(self, job_id=job_id, task_collection=task_collection, **kwargs)

        # multi-threaded behavior
        if concurrencies:
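For context, a minimal usage sketch of the patched create_tasks helper shown above, assuming a Batch account reachable with an AzureNamedKeyCredential. The account, job, and task names and the BatchTaskCreateContent model name are illustrative assumptions, not taken from this PR.

# Hedged sketch: bulk-adding tasks through the patched create_tasks helper.
# Account/job names and the BatchTaskCreateContent model name are assumptions.
from azure.batch import BatchClient
from azure.batch import models as _models
from azure.core.credentials import AzureNamedKeyCredential

credential = AzureNamedKeyCredential("account-name", "account-key")
client = BatchClient("https://account-name.region.batch.azure.com", credential)

tasks = [
    _models.BatchTaskCreateContent(id=f"task-{i}", command_line="cmd /c echo hello")
    for i in range(500)
]
# With concurrencies > 0, create_tasks submits chunks from a thread pool and
# gathers the per-task BatchTaskAddResult outcomes in the shared deque.
client.create_tasks(job_id="my-job", task_collection=tasks, concurrencies=4)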
@@ -241,18 +239,19 @@ def get_node_file_properties(
            creation_time=headers["ocp-creation-time"],
            # content_type=headers["Content-Type"], # need to add to typespec
            file_mode=headers["ocp-batch-file-mode"],
        )
        get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal( # type: ignore
            pool_id,
            node_id,
            file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
        )

        get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal( # type: ignore
            pool_id,
            node_id,
            file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            cls=cls,
            **kwargs)
            **kwargs
        )

        return get_response
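A hedged sketch of calling the patched helper above; the pool, node, and file names are hypothetical and the positional signature is assumed from the internal call it wraps. The point of the patch is that the wrapper returns a BatchFileProperties model populated from the ocp-* response headers rather than a raw response.

# Hedged sketch; assumes the `client` from the earlier create_tasks example.
props = client.get_node_file_properties("my-pool", "tvmps_node_1", "startup/stdout.txt")
print(props.creation_time, props.file_mode)  # fields filled from ocp-* headers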

@@ -309,18 +308,19 @@ def get_task_file_properties(
            creation_time=headers["ocp-creation-time"],
            # content_type=headers["Content-Type"], # need to add to typespec
            file_mode=headers["ocp-batch-file-mode"],
        )
        get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal( # type: ignore
            job_id,
            task_id,
            file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
        )

        get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal( # type: ignore
            job_id,
            task_id,
            file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            cls=cls,
            **kwargs)
            **kwargs
        )

        return get_response

@@ -523,9 +523,8 @@ def _bulk_add_tasks(self, results_queue, chunk_tasks_to_add):
                        for task in chunk_tasks_to_add:
                            if task.id == task_result.task_id:
                                self.tasks_to_add.appendleft(task)
                elif (
                    task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR
                    and not (task_result.error and task_result.error.code == "TaskExists")
                elif task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR and not (
                    task_result.error and task_result.error.code == "TaskExists"
                ):
                    # Client error will be recorded unless Task already exists
                    self.failure_tasks.appendleft(task_result)
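To make the reflowed elif above easier to follow, here is a small self-contained restatement of the same policy. BatchTaskAddResult, BatchTaskAddStatus.CLIENT_ERROR, and the "TaskExists" code appear in the diff; the SERVER_ERROR member name and the helper function itself are assumptions for illustration only.

# Illustrative restatement of the result handling in _bulk_add_tasks (not SDK code):
# server errors are re-queued for retry, "TaskExists" client errors are ignored,
# and any other client error is recorded as a failure.
from azure.batch import models as _models

def classify_add_result(task_result: _models.BatchTaskAddResult) -> str:
    if task_result.status == _models.BatchTaskAddStatus.SERVER_ERROR:  # assumed member name
        return "retry"  # the SDK pushes the matching task back onto tasks_to_add
    if task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR and not (
        task_result.error and task_result.error.code == "TaskExists"
    ):
        return "record-failure"  # appended to failure_tasks
    return "ignore"  # succeeded, or the task already exists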
2 changes: 1 addition & 1 deletion sdk/batch/azure-batch/azure/batch/_patch.py
@@ -146,7 +146,7 @@ class BatchClient(GenerateBatchClient):
    def __init__(self, endpoint: str, credential: Union[AzureNamedKeyCredential, TokenCredential], **kwargs):
        super().__init__(
            endpoint=endpoint,
            credential=credential, # type: ignore
            credential=credential, # type: ignore
            authentication_policy=kwargs.pop(
                "authentication_policy", self._format_shared_key_credential("", credential)
            ),
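The signature above accepts either an AzureNamedKeyCredential or a TokenCredential, so a Microsoft Entra ID credential can also be used. A hedged sketch, assuming azure-identity is installed and using a placeholder endpoint:

# Hedged sketch: constructing the client with a token credential instead of a shared key.
from azure.batch import BatchClient
from azure.identity import DefaultAzureCredential

client = BatchClient(
    endpoint="https://account-name.region.batch.azure.com",
    credential=DefaultAzureCredential(),
)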
57 changes: 28 additions & 29 deletions sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py
@@ -89,9 +89,7 @@ async def create_tasks(
        kwargs.update({"timeout": timeout, "ocpdate": ocpdate})

        results_queue: Deque[_models.BatchTaskAddResult] = collections.deque()
        task_workflow_manager = _TaskWorkflowManager(
            self, job_id=job_id, task_collection=task_collection, **kwargs
        )
        task_workflow_manager = _TaskWorkflowManager(self, job_id=job_id, task_collection=task_collection, **kwargs)

        if concurrencies:
            if concurrencies < 0:
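The aio patch mirrors the sync helper, so the earlier create_tasks sketch translates directly. A hedged async variant with the same illustrative names:

# Hedged async counterpart of the earlier create_tasks sketch.
import asyncio
from azure.batch import models as _models
from azure.batch.aio import BatchClient
from azure.core.credentials import AzureNamedKeyCredential

async def main() -> None:
    credential = AzureNamedKeyCredential("account-name", "account-key")
    tasks = [
        _models.BatchTaskCreateContent(id=f"task-{i}", command_line="cmd /c echo hello")
        for i in range(500)
    ]
    async with BatchClient("https://account-name.region.batch.azure.com", credential) as client:
        await client.create_tasks(job_id="my-job", task_collection=tasks, concurrencies=4)

asyncio.run(main())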
@@ -230,18 +228,19 @@ async def get_node_file_properties(
            creation_time=headers["ocp-creation-time"],
            # content_type=headers["Content-Type"], # need to add to typespec
            file_mode=headers["ocp-batch-file-mode"],
        )
        get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal( # type: ignore
            pool_id,
            node_id,
            file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
        )

        get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal( # type: ignore
            pool_id,
            node_id,
            file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            cls=cls,
            **kwargs)
            **kwargs
        )

        return get_response

@@ -298,18 +297,19 @@ async def get_task_file_properties(
            creation_time=headers["ocp-creation-time"],
            # content_type=headers["Content-Type"], # need to add to typespec
            file_mode=headers["ocp-batch-file-mode"],
        )
        get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal( # type: ignore
            job_id,
            task_id,
            file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
        )

        get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal( # type: ignore
            job_id,
            task_id,
            file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            cls=cls,
            **kwargs)
            **kwargs
        )

        return get_response

@@ -473,7 +473,7 @@ async def _bulk_add_tasks(
                    self.tasks_to_add.extendleft(chunk_tasks_to_add[midpoint:])
                    await self._bulk_add_tasks(results_queue, chunk_tasks_to_add[:midpoint])
                # Retry server side errors
                elif 500 <= e.response.status_code <= 599: # type: ignore
                elif 500 <= e.response.status_code <= 599: # type: ignore
                    self.tasks_to_add.extendleft(chunk_tasks_to_add)
                else:
                    # Re-add to pending queue as unknown status / don't have result
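The branch above halves an oversized chunk, re-queues the back half, and retries the front half, while plain 5xx responses re-queue the whole chunk. A generic, hedged illustration of that strategy; the submit callable and the 413 status check are placeholders, not the SDK's internals (the real code inspects the RequestBodyTooLarge error code):

# Generic sketch of the halve-and-retry strategy used above (not SDK code).
from typing import Callable, Deque, List

def add_with_halving(chunk: List[dict], pending: Deque[dict], submit: Callable[[List[dict]], int]) -> None:
    status = submit(chunk)  # hypothetical submitter returning an HTTP status code
    if status == 413:  # request body too large: retry with a smaller chunk
        midpoint = len(chunk) // 2
        pending.extendleft(chunk[midpoint:])  # back half goes back onto the pending queue
        add_with_halving(chunk[:midpoint], pending, submit)
    elif 500 <= status <= 599:  # transient server error: re-queue the whole chunk
        pending.extendleft(chunk)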
@@ -493,9 +493,8 @@ async def _bulk_add_tasks(
                        for task in chunk_tasks_to_add:
                            if task.id == task_result.task_id:
                                self.tasks_to_add.appendleft(task)
                elif (
                    task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR
                    and not (task_result.error and task_result.error.code == "TaskExists")
                elif task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR and not (
                    task_result.error and task_result.error.code == "TaskExists"
                ):
                    # Client error will be recorded unless Task already exists
                    self.failure_tasks.appendleft(task_result)
2 changes: 1 addition & 1 deletion sdk/batch/azure-batch/azure/batch/aio/_patch.py
@@ -41,7 +41,7 @@ class BatchClient(GenerateBatchClient):
    def __init__(self, endpoint: str, credential: Union[AzureNamedKeyCredential, TokenCredential], **kwargs):
        super().__init__(
            endpoint=endpoint,
            credential=credential, # type: ignore
            credential=credential, # type: ignore
            authentication_policy=kwargs.pop("authentication_policy", self._format_shared_key_credential(credential)),
            **kwargs
        )
2 changes: 0 additions & 2 deletions sdk/batch/azure-batch/azure/batch/models/__init__.py
@@ -123,7 +123,6 @@
    ExitConditions,
    ExitOptions,
    FileProperties,
    GetCertificateResponse,
    HttpHeader,
    ImageReference,
    InboundEndpoint,
@@ -337,7 +336,6 @@
    "ExitConditions",
    "ExitOptions",
    "FileProperties",
    "GetCertificateResponse",
    "HttpHeader",
    "ImageReference",
    "InboundEndpoint",