From d3f7b3f668a9b797efc9c3378a01847d50d756ac Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Tue, 8 Apr 2025 07:12:53 +0000 Subject: [PATCH 1/2] Initialize repository for autorest build 20250408.1 --- eng/emitter-package-lock.json | 135 ++++++++++++++++++++++++++++++---- eng/emitter-package.json | 26 ++++--- 2 files changed, 134 insertions(+), 27 deletions(-) diff --git a/eng/emitter-package-lock.json b/eng/emitter-package-lock.json index 54e432921693..e5908e602ae1 100644 --- a/eng/emitter-package-lock.json +++ b/eng/emitter-package-lock.json @@ -1,11 +1,11 @@ { - "name": "TempTypeSpecFiles", + "name": "emitter-consumer", "lockfileVersion": 3, "requires": true, "packages": { "": { "dependencies": { - "@azure-tools/typespec-python": "0.42.2" + "@azure-tools/typespec-python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@azure-tools/typespec-python/-/typespec-python-0.42.2-alpha.20250408.1.tgz" }, "devDependencies": { "@azure-tools/typespec-autorest": "~0.54.0", @@ -13,7 +13,6 @@ "@azure-tools/typespec-azure-resource-manager": "~0.54.0", "@azure-tools/typespec-azure-rulesets": "~0.54.0", "@azure-tools/typespec-client-generator-core": "~0.54.0", - "@azure-tools/typespec-liftr-base": "0.8.0", "@typespec/compiler": "^1.0.0-0", "@typespec/events": "~0.68.0", "@typespec/http": "^1.0.0-0", @@ -29,6 +28,7 @@ "version": "0.54.0", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-autorest/-/typespec-autorest-0.54.0.tgz", "integrity": "sha512-7Oh8R48CQfeiFFfrMTKdEozpx/riQe+KENkd6wn1Oku7aZJ/GDsPidwiu98sCBeSXeJhc3/UlHmxMZWgiat5KQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -48,6 +48,7 @@ "version": "0.54.0", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-core/-/typespec-azure-core-0.54.0.tgz", "integrity": "sha512-rlUK9j/1mHUHaNPzOibz1aeeUnROOrNlTPDmnHOfbo4WP0NwV4tDU3rnoUCZxwabVQGKb9U7VTsunb74AzAafg==", + "dev": true, "license": "MIT", "engines": { "node": 
">=20.0.0" @@ -62,6 +63,7 @@ "version": "0.54.0", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-resource-manager/-/typespec-azure-resource-manager-0.54.0.tgz", "integrity": "sha512-SKBMvBy3wD44ZIHjOmQcvYgWYnk4WcDOhXn1kLSgiYiX74zpv48G9sl8ic1AneREq5UtGNwZ4rdMFWY7BW+8hg==", + "dev": true, "license": "MIT", "dependencies": { "change-case": "~5.4.4", @@ -83,6 +85,7 @@ "version": "0.54.0", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-azure-rulesets/-/typespec-azure-rulesets-0.54.0.tgz", "integrity": "sha512-Pupm5D76JEV4SMesXEEpei7JcErJSt0agVMXH9KjFXRfUYX+coBwfkP+mu3ViZQ+DRgC21qyHk8SxT2ffxxK2Q==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -98,6 +101,7 @@ "version": "0.54.0", "resolved": "https://registry.npmjs.org/@azure-tools/typespec-client-generator-core/-/typespec-client-generator-core-0.54.0.tgz", "integrity": "sha512-qZR6FgB+wKfF5aRQtEwjUo6xgw1MomqyFwJf6WL+xstHDs7np3jBja43OCdJaooPzAknYWh2V+Hv77/fLFd9Aw==", + "dev": true, "license": "MIT", "dependencies": { "change-case": "~5.4.4", @@ -120,16 +124,10 @@ "@typespec/xml": "^0.68.0" } }, - "node_modules/@azure-tools/typespec-liftr-base": { - "version": "0.8.0", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-liftr-base/-/typespec-liftr-base-0.8.0.tgz", - "integrity": "sha512-xftTTtVjDuxIzugQ9nL/abmttdDM3HAf5HhqKzs9DO0Kl0ZhXQlB2DYlT1hBs/N+IWerMF9k2eKs2RncngA03g==", - "dev": true - }, "node_modules/@azure-tools/typespec-python": { - "version": "0.42.2", - "resolved": "https://registry.npmjs.org/@azure-tools/typespec-python/-/typespec-python-0.42.2.tgz", - "integrity": "sha512-N421iOPt23yTjY5DtdicakPFO8s9DBMMDdnfQl8wgz/VY01osb6oiYSysDI1wUVZrMzS00QwEFkgH7mEViLyEw==", + "version": "0.42.2-alpha.20250408.1", + "resolved": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@azure-tools/typespec-python/-/typespec-python-0.42.2-alpha.20250408.1.tgz", + "integrity": 
"sha512-8QgI62zIW1kp/EM7tugD/NeRptZTcHA2fYpT+AH8kXvTRDxWdXxl0AC0Tt/eL5av4A4sINfjK/xipjPbbznCwQ==", "hasInstallScript": true, "license": "MIT", "dependencies": { @@ -163,6 +161,7 @@ "version": "7.26.2", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", + "dev": true, "license": "MIT", "dependencies": { "@babel/helper-validator-identifier": "^7.25.9", @@ -177,6 +176,7 @@ "version": "7.25.9", "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=6.9.0" @@ -586,6 +586,7 @@ "version": "4.1.5", "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-4.1.5.tgz", "integrity": "sha512-swPczVU+at65xa5uPfNP9u3qx/alNwiaykiI/ExpsmMSQW55trmZcwhYWzw/7fj+n6Q8z1eENvR7vFfq9oPSAQ==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -610,6 +611,7 @@ "version": "5.1.9", "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-5.1.9.tgz", "integrity": "sha512-NgQCnHqFTjF7Ys2fsqK2WtnA8X1kHyInyG+nMIuHowVTIgIuS10T4AznI/PvbqSpJqjCUqNBlKGh1v3bwLFL4w==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -631,6 +633,7 @@ "version": "10.1.10", "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-10.1.10.tgz", "integrity": "sha512-roDaKeY1PYY0aCqhRmXihrHjoSW2A00pV3Ke5fTpMCkzcGF64R8e0lw3dK+eLEHwS4vB5RnW1wuQmvzoRul8Mw==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/figures": "^1.0.11", @@ -658,6 +661,7 @@ "version": "4.2.10", "resolved": "https://registry.npmjs.org/@inquirer/editor/-/editor-4.2.10.tgz", "integrity": 
"sha512-5GVWJ+qeI6BzR6TIInLP9SXhWCEcvgFQYmcRG6d6RIlhFjM5TyG18paTGBgRYyEouvCmzeco47x9zX9tQEofkw==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -680,6 +684,7 @@ "version": "4.0.12", "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-4.0.12.tgz", "integrity": "sha512-jV8QoZE1fC0vPe6TnsOfig+qwu7Iza1pkXoUJ3SroRagrt2hxiL+RbM432YAihNR7m7XnU0HWl/WQ35RIGmXHw==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -702,6 +707,7 @@ "version": "1.0.11", "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.11.tgz", "integrity": "sha512-eOg92lvrn/aRUqbxRyvpEWnrvRuTYRifixHkYVpJiygTgVSBIHDqLh0SrMQXkafvULg3ck11V7xvR+zcgvpHFw==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -711,6 +717,7 @@ "version": "4.1.9", "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-4.1.9.tgz", "integrity": "sha512-mshNG24Ij5KqsQtOZMgj5TwEjIf+F2HOESk6bjMwGWgcH5UBe8UoljwzNFHqdMbGYbgAf6v2wU/X9CAdKJzgOA==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -732,6 +739,7 @@ "version": "3.0.12", "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-3.0.12.tgz", "integrity": "sha512-7HRFHxbPCA4e4jMxTQglHJwP+v/kpFsCf2szzfBHy98Wlc3L08HL76UDiA87TOdX5fwj2HMOLWqRWv9Pnn+Z5Q==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -753,6 +761,7 @@ "version": "4.0.12", "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-4.0.12.tgz", "integrity": "sha512-FlOB0zvuELPEbnBYiPaOdJIaDzb2PmJ7ghi/SVwIHDDSQ2K4opGBkF+5kXOg6ucrtSUQdLhVVY5tycH0j0l+0g==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -775,6 +784,7 @@ "version": "7.4.1", "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-7.4.1.tgz", "integrity": "sha512-UlmM5FVOZF0gpoe1PT/jN4vk8JmpIWBlMvTL8M+hlvPmzN89K6z03+IFmyeu/oFCenwdwHDr2gky7nIGSEVvlA==", + "dev": true, "license": "MIT", 
"dependencies": { "@inquirer/checkbox": "^4.1.5", @@ -804,6 +814,7 @@ "version": "4.0.12", "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-4.0.12.tgz", "integrity": "sha512-wNPJZy8Oc7RyGISPxp9/MpTOqX8lr0r+lCCWm7hQra+MDtYRgINv1hxw7R+vKP71Bu/3LszabxOodfV/uTfsaA==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -826,6 +837,7 @@ "version": "3.0.12", "resolved": "https://registry.npmjs.org/@inquirer/search/-/search-3.0.12.tgz", "integrity": "sha512-H/kDJA3kNlnNIjB8YsaXoQI0Qccgf0Na14K1h8ExWhNmUg2E941dyFPrZeugihEa9AZNW5NdsD/NcvUME83OPQ==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -849,6 +861,7 @@ "version": "4.1.1", "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-4.1.1.tgz", "integrity": "sha512-IUXzzTKVdiVNMA+2yUvPxWsSgOG4kfX93jOM4Zb5FgujeInotv5SPIJVeXQ+fO4xu7tW8VowFhdG5JRmmCyQ1Q==", + "dev": true, "license": "MIT", "dependencies": { "@inquirer/core": "^10.1.10", @@ -873,6 +886,7 @@ "version": "3.0.6", "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-3.0.6.tgz", "integrity": "sha512-/mKVCtVpyBu3IDarv0G+59KC4stsD5mDsGpYh+GKs1NZT88Jh52+cuoA1AtLk2Q0r/quNl+1cSUyLRHBFeD0XA==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -890,6 +904,7 @@ "version": "4.0.1", "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz", "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==", + "dev": true, "license": "ISC", "dependencies": { "minipass": "^7.0.4" @@ -902,6 +917,7 @@ "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, "license": "MIT", "dependencies": { "@nodelib/fs.stat": "2.0.5", @@ -915,6 +931,7 @@ "version": "2.0.5", "resolved": 
"https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, "license": "MIT", "engines": { "node": ">= 8" @@ -924,6 +941,7 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, "license": "MIT", "dependencies": { "@nodelib/fs.scandir": "2.1.5", @@ -937,6 +955,7 @@ "version": "2.3.0", "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -949,6 +968,7 @@ "version": "1.0.0-rc.0", "resolved": "https://registry.npmjs.org/@typespec/compiler/-/compiler-1.0.0-rc.0.tgz", "integrity": "sha512-2N5DCFzuPt5rPXReE4T1boZrG60sr3dTMgZOS/WX+Rosc6iFj2v1ULTI2ySXk/Abd3oxS5OR24l8veIWEi0lzw==", + "dev": true, "license": "MIT", "dependencies": { "@babel/code-frame": "~7.26.2", @@ -981,6 +1001,7 @@ "version": "7.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", + "dev": true, "license": "ISC", "bin": { "semver": "bin/semver.js" @@ -993,6 +1014,7 @@ "version": "0.68.0", "resolved": "https://registry.npmjs.org/@typespec/events/-/events-0.68.0.tgz", "integrity": "sha512-U2y9K8QJ6HsmNxEyHz2aG2bmD05FIsLkmIZgmNaHDwhN1oyI8EH1NkxvwZCnEkPAN7ReuLYK6blouWFWX3s5eg==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1005,6 +1027,7 @@ "version": "1.0.0-rc.0", "resolved": "https://registry.npmjs.org/@typespec/http/-/http-1.0.0-rc.0.tgz", "integrity": 
"sha512-Or2hhDXy8DZZoy3B/HudSrRHTFomiv6DI3vRpPKYT9ocIAxMGo1hQvqKye8uVk/QIMn/ouv6JUlP+pqjpfnPyw==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1056,6 +1079,7 @@ "version": "1.0.0-rc.0", "resolved": "https://registry.npmjs.org/@typespec/openapi/-/openapi-1.0.0-rc.0.tgz", "integrity": "sha512-aswkRlzFI44CGe05qkzInA7jEhUKNxZYToYi5kXz05Jl5d4nh4VeEkCweb2pRL+4LKd2SZiOn09nXm+OKp5EoQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1069,6 +1093,7 @@ "version": "0.68.0", "resolved": "https://registry.npmjs.org/@typespec/rest/-/rest-0.68.0.tgz", "integrity": "sha512-VJBEpC0MCFPPN6acc5o0fwQm4WMjMEl3aBHE+71XYkagsqb31rYSyfgfBMvHWaEMJV4dVk5T787/q6AWDzEE8g==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1082,6 +1107,7 @@ "version": "0.68.0", "resolved": "https://registry.npmjs.org/@typespec/sse/-/sse-0.68.0.tgz", "integrity": "sha512-sePc+14iw8BZjBPwBaCL23y7lDWrUtmoYuPbfxJhRcIzbv2ww5d7mjvv5C2fjWyfRvG7tJ6wDk8YoHQJDoqtVA==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1097,6 +1123,7 @@ "version": "0.68.0", "resolved": "https://registry.npmjs.org/@typespec/streams/-/streams-0.68.0.tgz", "integrity": "sha512-FsyPYOcPA6CDptdsAI0kiwR9tG6pngf5Bi4PiKTsXwseu93v5Y4keLNr4SR+bNQQK6uYIm0OkoK34Z6qn6uZEw==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1109,6 +1136,7 @@ "version": "0.68.0", "resolved": "https://registry.npmjs.org/@typespec/versioning/-/versioning-0.68.0.tgz", "integrity": "sha512-nsK0hbOeqfsNo1dsP64A4Ks0C/FEk5WJ5LEfgTwvFGdE48mHrj7UJdp58Ps5F6moiR9U20P1rHbo+mE0LDIRvA==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1121,6 +1149,7 @@ "version": "0.68.0", "resolved": "https://registry.npmjs.org/@typespec/xml/-/xml-0.68.0.tgz", "integrity": "sha512-uB904g9KMkuYKmGZnJsuozjPX+AKzZNStdXvMLq8+TkOitpJcb1dHtFH6KufG21xWuF0bmRUSkJvO4MOsuQNLA==", + "dev": true, "license": "MIT", "engines": { "node": ">=20.0.0" @@ -1133,6 +1162,7 @@ "version": 
"8.17.1", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, "license": "MIT", "dependencies": { "fast-deep-equal": "^3.1.3", @@ -1149,6 +1179,7 @@ "version": "4.3.2", "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, "license": "MIT", "dependencies": { "type-fest": "^0.21.3" @@ -1164,6 +1195,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=8" @@ -1173,6 +1205,7 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, "license": "MIT", "dependencies": { "color-convert": "^2.0.1" @@ -1194,6 +1227,7 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, "license": "MIT", "dependencies": { "fill-range": "^7.1.1" @@ -1206,18 +1240,21 @@ "version": "5.4.4", "resolved": "https://registry.npmjs.org/change-case/-/change-case-5.4.4.tgz", "integrity": "sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==", + "dev": true, "license": "MIT" }, "node_modules/chardet": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true, "license": "MIT" }, 
"node_modules/chownr": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "dev": true, "license": "BlueOak-1.0.0", "engines": { "node": ">=18" @@ -1227,6 +1264,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz", "integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==", + "dev": true, "license": "ISC", "engines": { "node": ">= 12" @@ -1236,6 +1274,7 @@ "version": "8.0.1", "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, "license": "ISC", "dependencies": { "string-width": "^4.2.0", @@ -1250,6 +1289,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", @@ -1267,6 +1307,7 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, "license": "MIT", "dependencies": { "color-name": "~1.1.4" @@ -1279,18 +1320,21 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true, "license": "MIT" }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, "license": "MIT" }, "node_modules/env-paths": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", + "dev": true, "license": "MIT", "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" @@ -1343,6 +1387,7 @@ "version": "3.2.0", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, "license": "MIT", "engines": { "node": ">=6" @@ -1352,6 +1397,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, "license": "MIT", "dependencies": { "chardet": "^0.7.0", @@ -1366,12 +1412,14 @@ "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, "license": "MIT" }, "node_modules/fast-glob": { "version": "3.3.3", "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, "license": "MIT", "dependencies": { "@nodelib/fs.stat": "^2.0.2", @@ -1388,6 +1436,7 @@ "version": "3.0.6", "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz", "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==", + "dev": true, "funding": [ { "type": "github", @@ -1404,6 +1453,7 @@ "version": "1.19.1", "resolved": 
"https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz", "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==", + "dev": true, "license": "ISC", "dependencies": { "reusify": "^1.0.4" @@ -1413,6 +1463,7 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" @@ -1453,6 +1504,7 @@ "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, "license": "ISC", "engines": { "node": "6.* || 8.* || >= 10.*" @@ -1474,6 +1526,7 @@ "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, "license": "ISC", "dependencies": { "is-glob": "^4.0.1" @@ -1486,6 +1539,7 @@ "version": "14.1.0", "resolved": "https://registry.npmjs.org/globby/-/globby-14.1.0.tgz", "integrity": "sha512-0Ia46fDOaT7k4og1PDW4YbodWWr3scS2vAr2lTbsplOt2WkKp0vQbkI9wKis/T5LV/dqPjO3bpS/z6GTJB82LA==", + "dev": true, "license": "MIT", "dependencies": { "@sindresorhus/merge-streams": "^2.1.0", @@ -1512,6 +1566,7 @@ "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dev": true, "license": "MIT", "dependencies": { "safer-buffer": ">= 2.1.2 < 3" @@ -1524,6 +1579,7 @@ "version": "7.0.3", "resolved": "https://registry.npmjs.org/ignore/-/ignore-7.0.3.tgz", "integrity": 
"sha512-bAH5jbK/F3T3Jls4I0SO1hmPR0dKU0a7+SY6n1yzRtG54FLO8d6w/nxLFX2Nb7dBu6cCWXPaAME6cYqFUMmuCA==", + "dev": true, "license": "MIT", "engines": { "node": ">= 4" @@ -1533,6 +1589,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -1542,6 +1599,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, "license": "MIT", "engines": { "node": ">=8" @@ -1551,6 +1609,7 @@ "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" @@ -1563,6 +1622,7 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.12.0" @@ -1572,6 +1632,7 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -1584,6 +1645,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true, "license": "MIT" }, "node_modules/js-yaml": { @@ -1602,6 +1664,7 @@ "version": 
"1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true, "license": "MIT" }, "node_modules/jsonfile": { @@ -1617,9 +1680,9 @@ } }, "node_modules/marked": { - "version": "15.0.7", - "resolved": "https://registry.npmjs.org/marked/-/marked-15.0.7.tgz", - "integrity": "sha512-dgLIeKGLx5FwziAnsk4ONoGwHwGPJzselimvlVskE9XLN4Orv9u2VA3GWw/lYUqjfA0rUT/6fqKwfZJapP9BEg==", + "version": "15.0.8", + "resolved": "https://registry.npmjs.org/marked/-/marked-15.0.8.tgz", + "integrity": "sha512-rli4l2LyZqpQuRve5C0rkn6pj3hT8EWPC+zkAxFTAJLxRbENfTAhEQq9itrmf1Y81QtAX5D/MYlGlIomNgj9lA==", "license": "MIT", "bin": { "marked": "bin/marked.js" @@ -1632,6 +1695,7 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, "license": "MIT", "engines": { "node": ">= 8" @@ -1641,6 +1705,7 @@ "version": "4.0.8", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, "license": "MIT", "dependencies": { "braces": "^3.0.3", @@ -1654,6 +1719,7 @@ "version": "7.1.2", "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, "license": "ISC", "engines": { "node": ">=16 || 14 >=14.17" @@ -1663,6 +1729,7 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz", "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==", + "dev": true, "license": "MIT", "dependencies": { "minipass": "^7.1.2" @@ -1675,6 
+1742,7 @@ "version": "3.0.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", + "dev": true, "license": "MIT", "bin": { "mkdirp": "dist/cjs/src/bin.js" @@ -1690,6 +1758,7 @@ "version": "4.2.0", "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", + "dev": true, "license": "MIT", "bin": { "mustache": "bin/mustache" @@ -1699,6 +1768,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz", "integrity": "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==", + "dev": true, "license": "ISC", "engines": { "node": "^18.17.0 || >=20.5.0" @@ -1708,6 +1778,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "integrity": "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -1717,6 +1788,7 @@ "version": "6.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-6.0.0.tgz", "integrity": "sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -1729,12 +1801,14 @@ "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, "license": "ISC" }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, "license": 
"MIT", "engines": { "node": ">=8.6" @@ -1747,6 +1821,7 @@ "version": "8.0.0", "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-8.0.0.tgz", "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", + "dev": true, "license": "MIT", "engines": { "node": ">=4" @@ -1756,6 +1831,7 @@ "version": "3.5.3", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", + "dev": true, "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" @@ -1783,6 +1859,7 @@ "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, "funding": [ { "type": "github", @@ -1803,6 +1880,7 @@ "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -1812,6 +1890,7 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true, "license": "MIT", "engines": { "node": ">=0.10.0" @@ -1830,6 +1909,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, "license": "MIT", "engines": { "iojs": ">=1.0.0", @@ -1840,6 +1920,7 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", "integrity": 
"sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, "funding": [ { "type": "github", @@ -1863,6 +1944,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true, "license": "MIT" }, "node_modules/semver": { @@ -1881,6 +1963,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, "license": "ISC", "engines": { "node": ">=14" @@ -1893,6 +1976,7 @@ "version": "5.1.0", "resolved": "https://registry.npmjs.org/slash/-/slash-5.1.0.tgz", "integrity": "sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==", + "dev": true, "license": "MIT", "engines": { "node": ">=14.16" @@ -1905,6 +1989,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", @@ -1919,6 +2004,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "license": "MIT", "dependencies": { "ansi-regex": "^5.0.1" @@ -1931,6 +2017,7 @@ "version": "7.4.3", "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", + "dev": true, "license": "ISC", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", @@ -1948,6 +2035,7 @@ "version": "0.2.5", "resolved": 
"https://registry.npmjs.org/temporal-polyfill/-/temporal-polyfill-0.2.5.tgz", "integrity": "sha512-ye47xp8Cb0nDguAhrrDS1JT1SzwEV9e26sSsrWzVu+yPZ7LzceEcH0i2gci9jWfOfSCCgM3Qv5nOYShVUUFUXA==", + "dev": true, "license": "MIT", "dependencies": { "temporal-spec": "^0.2.4" @@ -1957,12 +2045,14 @@ "version": "0.2.4", "resolved": "https://registry.npmjs.org/temporal-spec/-/temporal-spec-0.2.4.tgz", "integrity": "sha512-lDMFv4nKQrSjlkHKAlHVqKrBG4DyFfa9F74cmBZ3Iy3ed8yvWnlWSIdi4IKfSqwmazAohBNwiN64qGx4y5Q3IQ==", + "dev": true, "license": "ISC" }, "node_modules/tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, "license": "MIT", "dependencies": { "os-tmpdir": "~1.0.2" @@ -1975,6 +2065,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, "license": "MIT", "dependencies": { "is-number": "^7.0.0" @@ -2006,6 +2097,7 @@ "version": "0.21.3", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=10" @@ -2018,6 +2110,7 @@ "version": "0.3.0", "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.3.0.tgz", "integrity": "sha512-+QBBXBCvifc56fsbuxZQ6Sic3wqqc3WWaqxs58gvJrcOuN83HGTCwz3oS5phzU9LthRNE9VrJCFCLUgHeeFnfA==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" @@ -2039,6 +2132,7 @@ "version": "8.2.0", "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", + "dev": true, 
"license": "MIT", "engines": { "node": ">=14.0.0" @@ -2048,6 +2142,7 @@ "version": "9.0.1", "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz", "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==", + "dev": true, "license": "MIT", "dependencies": { "vscode-languageserver-protocol": "3.17.5" @@ -2060,6 +2155,7 @@ "version": "3.17.5", "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", + "dev": true, "license": "MIT", "dependencies": { "vscode-jsonrpc": "8.2.0", @@ -2070,18 +2166,21 @@ "version": "1.0.12", "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", + "dev": true, "license": "MIT" }, "node_modules/vscode-languageserver-types": { "version": "3.17.5", "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", + "dev": true, "license": "MIT" }, "node_modules/wrap-ansi": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, "license": "MIT", "dependencies": { "ansi-styles": "^4.0.0", @@ -2117,6 +2216,7 @@ "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, "license": "ISC", "engines": { "node": ">=10" @@ -2126,6 +2226,7 @@ 
"version": "5.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "dev": true, "license": "BlueOak-1.0.0", "engines": { "node": ">=18" @@ -2135,6 +2236,7 @@ "version": "2.7.1", "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.1.tgz", "integrity": "sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ==", + "dev": true, "license": "ISC", "bin": { "yaml": "bin.mjs" @@ -2147,6 +2249,7 @@ "version": "17.7.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, "license": "MIT", "dependencies": { "cliui": "^8.0.1", @@ -2165,6 +2268,7 @@ "version": "21.1.1", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, "license": "ISC", "engines": { "node": ">=12" @@ -2174,6 +2278,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz", "integrity": "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==", + "dev": true, "license": "MIT", "engines": { "node": ">=18" diff --git a/eng/emitter-package.json b/eng/emitter-package.json index 9cc25c310c15..ead043323116 100644 --- a/eng/emitter-package.json +++ b/eng/emitter-package.json @@ -1,23 +1,25 @@ { "main": "dist/src/index.js", "dependencies": { - "@azure-tools/typespec-python": "0.42.2" + "@azure-tools/typespec-python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@azure-tools/typespec-python/-/typespec-python-0.42.2-alpha.20250408.1.tgz" }, "devDependencies": { + "@azure-tools/typespec-autorest": 
"~0.54.0", + "@azure-tools/typespec-azure-core": "~0.54.0", + "@azure-tools/typespec-azure-resource-manager": "~0.54.0", + "@azure-tools/typespec-azure-rulesets": "~0.54.0", + "@azure-tools/typespec-client-generator-core": "~0.54.0", "@typespec/compiler": "^1.0.0-0", + "@typespec/events": "~0.68.0", "@typespec/http": "^1.0.0-0", - "@typespec/rest": "~0.68.0", - "@typespec/versioning": "~0.68.0", "@typespec/openapi": "^1.0.0-0", - "@typespec/events": "~0.68.0", + "@typespec/rest": "~0.68.0", "@typespec/sse": "~0.68.0", "@typespec/streams": "~0.68.0", - "@typespec/xml": "~0.68.0", - "@azure-tools/typespec-azure-core": "~0.54.0", - "@azure-tools/typespec-azure-resource-manager": "~0.54.0", - "@azure-tools/typespec-autorest": "~0.54.0", - "@azure-tools/typespec-azure-rulesets": "~0.54.0", - "@azure-tools/typespec-client-generator-core": "~0.54.0", - "@azure-tools/typespec-liftr-base": "0.8.0" + "@typespec/versioning": "~0.68.0", + "@typespec/xml": "~0.68.0" + }, + "overrides": { + "@autorest/python": "https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-js-test-autorest@local/npm/registry/@autorest/python/-/python-6.32.2-alpha.20250408.1.tgz" } -} \ No newline at end of file +} From 8b252625507a94ee80f91ab5789c9e53535cafa5 Mon Sep 17 00:00:00 2001 From: azure-sdk Date: Tue, 8 Apr 2025 07:39:33 +0000 Subject: [PATCH 2/2] Update SDK code ai_vi_0 --- sdk/batch/azure-batch/MANIFEST.in | 2 +- sdk/batch/azure-batch/apiview-properties.json | 286 + sdk/batch/azure-batch/azure/batch/__init__.py | 33 +- .../azure/batch/_operations/_patch.py | 55 +- sdk/batch/azure-batch/azure/batch/_patch.py | 2 +- .../azure/batch/aio/_operations/_patch.py | 57 +- .../azure-batch/azure/batch/aio/_patch.py | 2 +- .../azure/batch/models/__init__.py | 2 - .../azure-batch/azure/batch/models/_models.py | 736 +- .../azure-batch/azure/batch/models/_patch.py | 6 +- sdk/batch/azure-batch/client/__init__.py | 32 + sdk/batch/azure-batch/client/_client.py | 100 + 
.../azure-batch/client/_configuration.py | 65 + sdk/batch/azure-batch/client/_model_base.py | 1236 ++ .../client/_operations/__init__.py | 25 + .../client/_operations/_operations.py | 11483 ++++++++++++++++ .../azure-batch/client/_operations/_patch.py | 21 + sdk/batch/azure-batch/client/_patch.py | 21 + .../azure-batch/client/_serialization.py | 2032 +++ sdk/batch/azure-batch/client/_vendor.py | 57 + sdk/batch/azure-batch/client/_version.py | 9 + sdk/batch/azure-batch/client/aio/__init__.py | 29 + sdk/batch/azure-batch/client/aio/_client.py | 102 + .../azure-batch/client/aio/_configuration.py | 65 + .../client/aio/_operations/__init__.py | 25 + .../client/aio/_operations/_operations.py | 8308 +++++++++++ .../client/aio/_operations/_patch.py | 21 + sdk/batch/azure-batch/client/aio/_patch.py | 21 + sdk/batch/azure-batch/client/aio/_vendor.py | 57 + sdk/batch/azure-batch/client/py.typed | 1 + sdk/batch/azure-batch/setup.py | 6 +- sdk/batch/azure-batch/tests/test_batch.py | 6 +- .../azure-developer-loadtesting/MANIFEST.in | 2 +- .../apiview-properties.json | 101 + .../customizations/__init__.py | 34 + .../customizations/_client.py | 174 + .../customizations/_configuration.py | 112 + .../customizations/_model_base.py | 1236 ++ .../customizations/_operations/__init__.py | 27 + .../customizations/_operations/_operations.py | 4494 ++++++ .../customizations/_operations/_patch.py | 21 + .../customizations/_patch.py | 21 + .../customizations/_serialization.py | 2032 +++ .../customizations/_validation.py | 50 + .../customizations/_vendor.py | 34 + .../customizations/_version.py | 9 + .../customizations/aio/__init__.py | 31 + .../customizations/aio/_client.py | 178 + .../customizations/aio/_configuration.py | 112 + .../aio/_operations/__init__.py | 27 + .../aio/_operations/_operations.py | 3476 +++++ .../customizations/aio/_operations/_patch.py | 21 + .../customizations/aio/_patch.py | 21 + .../customizations/aio/_vendor.py | 34 + .../customizations/py.typed | 1 + 
.../create_or_update_test.py | 86 + .../create_or_update_test_app_components.py | 52 + .../create_or_update_test_profile.py | 55 + ...reate_or_update_test_run_app_components.py | 52 + ...r_update_test_run_server_metrics_config.py | 55 + ...te_or_update_test_server_metrics_config.py | 55 + .../generated_samples/delete_test.py | 40 + .../generated_samples/delete_test_file.py | 41 + .../generated_samples/delete_test_profile.py | 40 + .../delete_test_profile_run.py | 40 + .../generated_samples/delete_test_run.py | 40 + .../generated_samples/get_test.py | 41 + .../generated_samples/get_test_file.py | 42 + .../generated_samples/get_test_profile.py | 41 + .../get_test_profile_run_executed.py | 41 + .../get_test_profile_run_executing.py | 41 + .../generated_samples/get_test_run.py | 41 + .../generated_samples/get_test_run_file.py | 42 + .../list_metric_dimension_values.py | 46 + .../list_test_app_components.py | 41 + .../generated_samples/list_test_files.py | 42 + .../list_test_profile_runs.py | 40 + .../generated_samples/list_test_profiles.py | 40 + .../list_test_run_app_components.py | 41 + .../list_test_run_metrics.py | 45 + .../list_test_run_metrics_definitions.py | 42 + .../list_test_run_metrics_namespaces.py | 41 + .../list_test_run_server_metrics_config.py | 41 + .../generated_samples/list_test_runs.py | 40 + .../list_test_server_metrics_config.py | 41 + .../generated_samples/list_tests.py | 40 + .../stop_test_profile_run.py | 41 + .../generated_samples/stop_test_run.py | 41 + .../generated_tests/conftest.py | 56 + .../test_load_test_administration.py | 341 + .../test_load_test_administration_async.py | 342 + .../generated_tests/test_load_test_run.py | 242 + .../test_load_test_run_async.py | 243 + .../generated_tests/testpreparer.py | 44 + .../generated_tests/testpreparer_async.py | 31 + .../microsoft/__init__.py | 1 + .../microsoft/loadtestservice/__init__.py | 1 + .../loadtestservice/models/__init__.py | 156 + .../loadtestservice/models/_enums.py | 317 + 
.../loadtestservice/models/_models.py | 2599 ++++ .../loadtestservice/models/_patch.py | 21 + .../create_or_update_app_components_test.py | 1 + .../samples/create_or_update_test.py | 1 + .../samples/create_or_update_test_profile.py | 7 +- .../samples/upload_test_file.py | 1 + .../azure-developer-loadtesting/setup.py | 6 +- ...test_async_load_test_administration_ops.py | 60 +- .../tests/test_async_load_test_run_ops.py | 69 +- .../test_load_test_administration_ops.py | 63 +- .../tests/test_load_test_run_ops.py | 79 +- .../tests/testcase.py | 2 +- .../tests/testcase_async.py | 6 +- 112 files changed, 42682 insertions(+), 625 deletions(-) create mode 100644 sdk/batch/azure-batch/apiview-properties.json create mode 100644 sdk/batch/azure-batch/client/__init__.py create mode 100644 sdk/batch/azure-batch/client/_client.py create mode 100644 sdk/batch/azure-batch/client/_configuration.py create mode 100644 sdk/batch/azure-batch/client/_model_base.py create mode 100644 sdk/batch/azure-batch/client/_operations/__init__.py create mode 100644 sdk/batch/azure-batch/client/_operations/_operations.py create mode 100644 sdk/batch/azure-batch/client/_operations/_patch.py create mode 100644 sdk/batch/azure-batch/client/_patch.py create mode 100644 sdk/batch/azure-batch/client/_serialization.py create mode 100644 sdk/batch/azure-batch/client/_vendor.py create mode 100644 sdk/batch/azure-batch/client/_version.py create mode 100644 sdk/batch/azure-batch/client/aio/__init__.py create mode 100644 sdk/batch/azure-batch/client/aio/_client.py create mode 100644 sdk/batch/azure-batch/client/aio/_configuration.py create mode 100644 sdk/batch/azure-batch/client/aio/_operations/__init__.py create mode 100644 sdk/batch/azure-batch/client/aio/_operations/_operations.py create mode 100644 sdk/batch/azure-batch/client/aio/_operations/_patch.py create mode 100644 sdk/batch/azure-batch/client/aio/_patch.py create mode 100644 sdk/batch/azure-batch/client/aio/_vendor.py create mode 100644 
sdk/batch/azure-batch/client/py.typed create mode 100644 sdk/loadtesting/azure-developer-loadtesting/apiview-properties.json create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_client.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_configuration.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_model_base.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_operations.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_patch.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_patch.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_serialization.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_validation.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_vendor.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/_version.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_client.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_configuration.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_operations.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_patch.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_patch.py create mode 100644 
sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_vendor.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/customizations/py.typed create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_app_components.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_profile.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_app_components.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_server_metrics_config.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_server_metrics_config.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_file.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_file.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executed.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executing.py create mode 100644 
sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run_file.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_metric_dimension_values.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_app_components.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_files.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profile_runs.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profiles.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_app_components.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_definitions.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_namespaces.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_server_metrics_config.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_runs.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_server_metrics_config.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_tests.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_profile_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/conftest.py create mode 100644 
sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration_async.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run_async.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer_async.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/__init__.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_enums.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_models.py create mode 100644 sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_patch.py diff --git a/sdk/batch/azure-batch/MANIFEST.in b/sdk/batch/azure-batch/MANIFEST.in index cb1e2b1128cb..06208ba9fde2 100644 --- a/sdk/batch/azure-batch/MANIFEST.in +++ b/sdk/batch/azure-batch/MANIFEST.in @@ -3,4 +3,4 @@ include LICENSE include azure/batch/py.typed recursive-include tests *.py recursive-include samples *.py *.md -include azure/__init__.py \ No newline at end of file +include azure/__init__.py diff --git a/sdk/batch/azure-batch/apiview-properties.json b/sdk/batch/azure-batch/apiview-properties.json new file mode 100644 index 000000000000..c966e04c8976 --- /dev/null +++ b/sdk/batch/azure-batch/apiview-properties.json @@ -0,0 +1,286 @@ +{ + "CrossLanguagePackageId": "Azure.Batch", + "CrossLanguageDefinitionId": { + "azure.batch.models.AffinityInfo": 
"Azure.Batch.AffinityInfo", + "azure.batch.models.AuthenticationTokenSettings": "Azure.Batch.AuthenticationTokenSettings", + "azure.batch.models.AutomaticOsUpgradePolicy": "Azure.Batch.AutomaticOsUpgradePolicy", + "azure.batch.models.AutoScaleRun": "Azure.Batch.AutoScaleRun", + "azure.batch.models.AutoScaleRunError": "Azure.Batch.AutoScaleRunError", + "azure.batch.models.AutoUserSpecification": "Azure.Batch.AutoUserSpecification", + "azure.batch.models.AzureBlobFileSystemConfiguration": "Azure.Batch.AzureBlobFileSystemConfiguration", + "azure.batch.models.AzureFileShareConfiguration": "Azure.Batch.AzureFileShareConfiguration", + "azure.batch.models.BatchApplication": "Azure.Batch.BatchApplication", + "azure.batch.models.BatchApplicationPackageReference": "Azure.Batch.BatchApplicationPackageReference", + "azure.batch.models.BatchAutoPoolSpecification": "Azure.Batch.BatchAutoPoolSpecification", + "azure.batch.models.BatchCertificate": "Azure.Batch.BatchCertificate", + "azure.batch.models.BatchCertificateReference": "Azure.Batch.BatchCertificateReference", + "azure.batch.models.BatchError": "Azure.Batch.BatchError", + "azure.batch.models.BatchErrorDetail": "Azure.Batch.BatchErrorDetail", + "azure.batch.models.BatchErrorMessage": "Azure.Batch.BatchErrorMessage", + "azure.batch.models.BatchJob": "Azure.Batch.BatchJob", + "azure.batch.models.BatchJobConstraints": "Azure.Batch.BatchJobConstraints", + "azure.batch.models.BatchJobCreateContent": "Azure.Batch.BatchJobCreateContent", + "azure.batch.models.BatchJobDisableContent": "Azure.Batch.BatchJobDisableContent", + "azure.batch.models.BatchJobExecutionInfo": "Azure.Batch.BatchJobExecutionInfo", + "azure.batch.models.BatchJobManagerTask": "Azure.Batch.BatchJobManagerTask", + "azure.batch.models.BatchJobNetworkConfiguration": "Azure.Batch.BatchJobNetworkConfiguration", + "azure.batch.models.BatchJobPreparationAndReleaseTaskStatus": "Azure.Batch.BatchJobPreparationAndReleaseTaskStatus", + 
"azure.batch.models.BatchJobPreparationTask": "Azure.Batch.BatchJobPreparationTask", + "azure.batch.models.BatchJobPreparationTaskExecutionInfo": "Azure.Batch.BatchJobPreparationTaskExecutionInfo", + "azure.batch.models.BatchJobReleaseTask": "Azure.Batch.BatchJobReleaseTask", + "azure.batch.models.BatchJobReleaseTaskExecutionInfo": "Azure.Batch.BatchJobReleaseTaskExecutionInfo", + "azure.batch.models.BatchJobSchedule": "Azure.Batch.BatchJobSchedule", + "azure.batch.models.BatchJobScheduleConfiguration": "Azure.Batch.BatchJobScheduleConfiguration", + "azure.batch.models.BatchJobScheduleCreateContent": "Azure.Batch.BatchJobScheduleCreateContent", + "azure.batch.models.BatchJobScheduleExecutionInfo": "Azure.Batch.BatchJobScheduleExecutionInfo", + "azure.batch.models.BatchJobScheduleStatistics": "Azure.Batch.BatchJobScheduleStatistics", + "azure.batch.models.BatchJobScheduleUpdateContent": "Azure.Batch.BatchJobScheduleUpdateContent", + "azure.batch.models.BatchJobSchedulingError": "Azure.Batch.BatchJobSchedulingError", + "azure.batch.models.BatchJobSpecification": "Azure.Batch.BatchJobSpecification", + "azure.batch.models.BatchJobStatistics": "Azure.Batch.BatchJobStatistics", + "azure.batch.models.BatchJobTerminateContent": "Azure.Batch.BatchJobTerminateContent", + "azure.batch.models.BatchJobUpdateContent": "Azure.Batch.BatchJobUpdateContent", + "azure.batch.models.BatchNode": "Azure.Batch.BatchNode", + "azure.batch.models.BatchNodeAgentInfo": "Azure.Batch.BatchNodeAgentInfo", + "azure.batch.models.BatchNodeCounts": "Azure.Batch.BatchNodeCounts", + "azure.batch.models.BatchNodeDeallocateContent": "Azure.Batch.BatchNodeDeallocateContent", + "azure.batch.models.BatchNodeDisableSchedulingContent": "Azure.Batch.BatchNodeDisableSchedulingContent", + "azure.batch.models.BatchNodeEndpointConfiguration": "Azure.Batch.BatchNodeEndpointConfiguration", + "azure.batch.models.BatchNodeError": "Azure.Batch.BatchNodeError", + "azure.batch.models.BatchNodeFile": 
"Azure.Batch.BatchNodeFile", + "azure.batch.models.BatchNodeIdentityReference": "Azure.Batch.BatchNodeIdentityReference", + "azure.batch.models.BatchNodeInfo": "Azure.Batch.BatchNodeInfo", + "azure.batch.models.BatchNodePlacementConfiguration": "Azure.Batch.BatchNodePlacementConfiguration", + "azure.batch.models.BatchNodeRebootContent": "Azure.Batch.BatchNodeRebootContent", + "azure.batch.models.BatchNodeReimageContent": "Azure.Batch.BatchNodeReimageContent", + "azure.batch.models.BatchNodeRemoteLoginSettings": "Azure.Batch.BatchNodeRemoteLoginSettings", + "azure.batch.models.BatchNodeRemoveContent": "Azure.Batch.BatchNodeRemoveContent", + "azure.batch.models.BatchNodeUserCreateContent": "Azure.Batch.BatchNodeUserCreateContent", + "azure.batch.models.BatchNodeUserUpdateContent": "Azure.Batch.BatchNodeUserUpdateContent", + "azure.batch.models.BatchNodeVMExtension": "Azure.Batch.BatchNodeVMExtension", + "azure.batch.models.BatchPool": "Azure.Batch.BatchPool", + "azure.batch.models.BatchPoolCreateContent": "Azure.Batch.BatchPoolCreateContent", + "azure.batch.models.BatchPoolEnableAutoScaleContent": "Azure.Batch.BatchPoolEnableAutoScaleContent", + "azure.batch.models.BatchPoolEndpointConfiguration": "Azure.Batch.BatchPoolEndpointConfiguration", + "azure.batch.models.BatchPoolEvaluateAutoScaleContent": "Azure.Batch.BatchPoolEvaluateAutoScaleContent", + "azure.batch.models.BatchPoolIdentity": "Azure.Batch.BatchPoolIdentity", + "azure.batch.models.BatchPoolInfo": "Azure.Batch.BatchPoolInfo", + "azure.batch.models.BatchPoolNodeCounts": "Azure.Batch.BatchPoolNodeCounts", + "azure.batch.models.BatchPoolReplaceContent": "Azure.Batch.BatchPoolReplaceContent", + "azure.batch.models.BatchPoolResizeContent": "Azure.Batch.BatchPoolResizeContent", + "azure.batch.models.BatchPoolResourceStatistics": "Azure.Batch.BatchPoolResourceStatistics", + "azure.batch.models.BatchPoolSpecification": "Azure.Batch.BatchPoolSpecification", + "azure.batch.models.BatchPoolStatistics": 
"Azure.Batch.BatchPoolStatistics", + "azure.batch.models.BatchPoolUpdateContent": "Azure.Batch.BatchPoolUpdateContent", + "azure.batch.models.BatchPoolUsageMetrics": "Azure.Batch.BatchPoolUsageMetrics", + "azure.batch.models.BatchPoolUsageStatistics": "Azure.Batch.BatchPoolUsageStatistics", + "azure.batch.models.BatchStartTask": "Azure.Batch.BatchStartTask", + "azure.batch.models.BatchStartTaskInfo": "Azure.Batch.BatchStartTaskInfo", + "azure.batch.models.BatchSubtask": "Azure.Batch.BatchSubtask", + "azure.batch.models.BatchSupportedImage": "Azure.Batch.BatchSupportedImage", + "azure.batch.models.BatchTask": "Azure.Batch.BatchTask", + "azure.batch.models.BatchTaskAddCollectionResult": "Azure.Batch.BatchTaskAddCollectionResult", + "azure.batch.models.BatchTaskAddResult": "Azure.Batch.BatchTaskAddResult", + "azure.batch.models.BatchTaskConstraints": "Azure.Batch.BatchTaskConstraints", + "azure.batch.models.BatchTaskContainerExecutionInfo": "Azure.Batch.BatchTaskContainerExecutionInfo", + "azure.batch.models.BatchTaskContainerSettings": "Azure.Batch.BatchTaskContainerSettings", + "azure.batch.models.BatchTaskCounts": "Azure.Batch.BatchTaskCounts", + "azure.batch.models.BatchTaskCountsResult": "Azure.Batch.BatchTaskCountsResult", + "azure.batch.models.BatchTaskCreateContent": "Azure.Batch.BatchTaskCreateContent", + "azure.batch.models.BatchTaskDependencies": "Azure.Batch.BatchTaskDependencies", + "azure.batch.models.BatchTaskExecutionInfo": "Azure.Batch.BatchTaskExecutionInfo", + "azure.batch.models.BatchTaskFailureInfo": "Azure.Batch.BatchTaskFailureInfo", + "azure.batch.models.BatchTaskGroup": "Azure.Batch.BatchTaskGroup", + "azure.batch.models.BatchTaskIdRange": "Azure.Batch.BatchTaskIdRange", + "azure.batch.models.BatchTaskInfo": "Azure.Batch.BatchTaskInfo", + "azure.batch.models.BatchTaskSchedulingPolicy": "Azure.Batch.BatchTaskSchedulingPolicy", + "azure.batch.models.BatchTaskSlotCounts": "Azure.Batch.BatchTaskSlotCounts", + 
"azure.batch.models.BatchTaskStatistics": "Azure.Batch.BatchTaskStatistics", + "azure.batch.models.CifsMountConfiguration": "Azure.Batch.CifsMountConfiguration", + "azure.batch.models.ContainerConfiguration": "Azure.Batch.ContainerConfiguration", + "azure.batch.models.ContainerHostBatchBindMountEntry": "Azure.Batch.ContainerHostBatchBindMountEntry", + "azure.batch.models.ContainerRegistryReference": "Azure.Batch.ContainerRegistryReference", + "azure.batch.models.DataDisk": "Azure.Batch.DataDisk", + "azure.batch.models.DeleteBatchCertificateError": "Azure.Batch.DeleteBatchCertificateError", + "azure.batch.models.DiffDiskSettings": "Azure.Batch.DiffDiskSettings", + "azure.batch.models.DiskEncryptionConfiguration": "Azure.Batch.DiskEncryptionConfiguration", + "azure.batch.models.EnvironmentSetting": "Azure.Batch.EnvironmentSetting", + "azure.batch.models.ExitCodeMapping": "Azure.Batch.ExitCodeMapping", + "azure.batch.models.ExitCodeRangeMapping": "Azure.Batch.ExitCodeRangeMapping", + "azure.batch.models.ExitConditions": "Azure.Batch.ExitConditions", + "azure.batch.models.ExitOptions": "Azure.Batch.ExitOptions", + "azure.batch.models.FileProperties": "Azure.Batch.FileProperties", + "azure.batch.models.HttpHeader": "Azure.Batch.HttpHeader", + "azure.batch.models.ImageReference": "Azure.Batch.ImageReference", + "azure.batch.models.InboundEndpoint": "Azure.Batch.InboundEndpoint", + "azure.batch.models.InboundNatPool": "Azure.Batch.InboundNatPool", + "azure.batch.models.InstanceViewStatus": "Azure.Batch.InstanceViewStatus", + "azure.batch.models.LinuxUserConfiguration": "Azure.Batch.LinuxUserConfiguration", + "azure.batch.models.ManagedDisk": "Azure.Batch.ManagedDisk", + "azure.batch.models.MetadataItem": "Azure.Batch.MetadataItem", + "azure.batch.models.MountConfiguration": "Azure.Batch.MountConfiguration", + "azure.batch.models.MultiInstanceSettings": "Azure.Batch.MultiInstanceSettings", + "azure.batch.models.NameValuePair": "Azure.Batch.NameValuePair", + 
"azure.batch.models.NetworkConfiguration": "Azure.Batch.NetworkConfiguration", + "azure.batch.models.NetworkSecurityGroupRule": "Azure.Batch.NetworkSecurityGroupRule", + "azure.batch.models.NfsMountConfiguration": "Azure.Batch.NfsMountConfiguration", + "azure.batch.models.OSDisk": "Azure.Batch.OSDisk", + "azure.batch.models.OutputFile": "Azure.Batch.OutputFile", + "azure.batch.models.OutputFileBlobContainerDestination": "Azure.Batch.OutputFileBlobContainerDestination", + "azure.batch.models.OutputFileDestination": "Azure.Batch.OutputFileDestination", + "azure.batch.models.OutputFileUploadConfig": "Azure.Batch.OutputFileUploadConfig", + "azure.batch.models.PublicIpAddressConfiguration": "Azure.Batch.PublicIpAddressConfiguration", + "azure.batch.models.RecentBatchJob": "Azure.Batch.RecentBatchJob", + "azure.batch.models.ResizeError": "Azure.Batch.ResizeError", + "azure.batch.models.ResourceFile": "Azure.Batch.ResourceFile", + "azure.batch.models.RollingUpgradePolicy": "Azure.Batch.RollingUpgradePolicy", + "azure.batch.models.SecurityProfile": "Azure.Batch.SecurityProfile", + "azure.batch.models.ServiceArtifactReference": "Azure.Batch.ServiceArtifactReference", + "azure.batch.models.UefiSettings": "Azure.Batch.UefiSettings", + "azure.batch.models.UpgradePolicy": "Azure.Batch.UpgradePolicy", + "azure.batch.models.UploadBatchServiceLogsContent": "Azure.Batch.UploadBatchServiceLogsContent", + "azure.batch.models.UploadBatchServiceLogsResult": "Azure.Batch.UploadBatchServiceLogsResult", + "azure.batch.models.UserAccount": "Azure.Batch.UserAccount", + "azure.batch.models.UserAssignedIdentity": "Azure.Batch.UserAssignedIdentity", + "azure.batch.models.UserIdentity": "Azure.Batch.UserIdentity", + "azure.batch.models.VirtualMachineConfiguration": "Azure.Batch.VirtualMachineConfiguration", + "azure.batch.models.VirtualMachineInfo": "Azure.Batch.VirtualMachineInfo", + "azure.batch.models.VMDiskSecurityProfile": "Azure.Batch.VMDiskSecurityProfile", + 
"azure.batch.models.VMExtension": "Azure.Batch.VMExtension", + "azure.batch.models.VMExtensionInstanceView": "Azure.Batch.VMExtensionInstanceView", + "azure.batch.models.WindowsConfiguration": "Azure.Batch.WindowsConfiguration", + "azure.batch.models.WindowsUserConfiguration": "Azure.Batch.WindowsUserConfiguration", + "client.models.CachingType": "Azure.Batch.CachingType", + "client.models.StorageAccountType": "Azure.Batch.StorageAccountType", + "client.models.ContainerType": "Azure.Batch.ContainerType", + "client.models.DiskEncryptionTarget": "Azure.Batch.DiskEncryptionTarget", + "client.models.BatchNodePlacementPolicyType": "Azure.Batch.BatchNodePlacementPolicyType", + "client.models.DiffDiskPlacement": "Azure.Batch.DiffDiskPlacement", + "client.models.SecurityEncryptionTypes": "Azure.Batch.SecurityEncryptionTypes", + "client.models.SecurityTypes": "Azure.Batch.SecurityTypes", + "client.models.DynamicVNetAssignmentScope": "Azure.Batch.DynamicVNetAssignmentScope", + "client.models.InboundEndpointProtocol": "Azure.Batch.InboundEndpointProtocol", + "client.models.NetworkSecurityGroupRuleAccess": "Azure.Batch.NetworkSecurityGroupRuleAccess", + "client.models.IpAddressProvisioningType": "Azure.Batch.IpAddressProvisioningType", + "client.models.ContainerWorkingDirectory": "Azure.Batch.ContainerWorkingDirectory", + "client.models.ContainerHostDataPath": "Azure.Batch.ContainerHostDataPath", + "client.models.AutoUserScope": "Azure.Batch.AutoUserScope", + "client.models.ElevationLevel": "Azure.Batch.ElevationLevel", + "client.models.BatchCertificateStoreLocation": "Azure.Batch.BatchCertificateStoreLocation", + "client.models.BatchCertificateVisibility": "Azure.Batch.BatchCertificateVisibility", + "client.models.BatchNodeFillType": "Azure.Batch.BatchNodeFillType", + "client.models.LoginMode": "Azure.Batch.LoginMode", + "client.models.BatchNodeCommunicationMode": "Azure.Batch.BatchNodeCommunicationMode", + "client.models.UpgradeMode": "Azure.Batch.UpgradeMode", + 
"client.models.BatchPoolState": "Azure.Batch.BatchPoolState", + "client.models.AllocationState": "Azure.Batch.AllocationState", + "client.models.BatchPoolIdentityType": "Azure.Batch.BatchPoolIdentityType", + "client.models.BatchNodeDeallocationOption": "Azure.Batch.BatchNodeDeallocationOption", + "client.models.OSType": "Azure.Batch.OSType", + "client.models.ImageVerificationType": "Azure.Batch.ImageVerificationType", + "client.models.BatchJobState": "Azure.Batch.BatchJobState", + "client.models.OutputFileUploadCondition": "Azure.Batch.OutputFileUploadCondition", + "client.models.AccessScope": "Azure.Batch.AccessScope", + "client.models.BatchPoolLifetimeOption": "Azure.Batch.BatchPoolLifetimeOption", + "client.models.OnAllBatchTasksComplete": "Azure.Batch.OnAllBatchTasksComplete", + "client.models.OnBatchTaskFailure": "Azure.Batch.OnBatchTaskFailure", + "client.models.ErrorCategory": "Azure.Batch.ErrorCategory", + "client.models.DisableBatchJobOption": "Azure.Batch.DisableBatchJobOption", + "client.models.BatchJobPreparationTaskState": "Azure.Batch.BatchJobPreparationTaskState", + "client.models.BatchTaskExecutionResult": "Azure.Batch.BatchTaskExecutionResult", + "client.models.BatchJobReleaseTaskState": "Azure.Batch.BatchJobReleaseTaskState", + "client.models.BatchCertificateState": "Azure.Batch.BatchCertificateState", + "client.models.BatchCertificateFormat": "Azure.Batch.BatchCertificateFormat", + "client.models.BatchJobScheduleState": "Azure.Batch.BatchJobScheduleState", + "client.models.BatchJobAction": "Azure.Batch.BatchJobAction", + "client.models.DependencyAction": "Azure.Batch.DependencyAction", + "client.models.BatchTaskState": "Azure.Batch.BatchTaskState", + "client.models.BatchTaskAddStatus": "Azure.Batch.BatchTaskAddStatus", + "client.models.BatchSubtaskState": "Azure.Batch.BatchSubtaskState", + "client.models.BatchNodeState": "Azure.Batch.BatchNodeState", + "client.models.SchedulingState": "Azure.Batch.SchedulingState", + 
"client.models.BatchStartTaskState": "Azure.Batch.BatchStartTaskState", + "client.models.BatchNodeRebootOption": "Azure.Batch.BatchNodeRebootOption", + "client.models.BatchNodeReimageOption": "Azure.Batch.BatchNodeReimageOption", + "client.models.BatchNodeDeallocateOption": "Azure.Batch.BatchNodeDeallocateOption", + "client.models.BatchNodeDisableSchedulingOption": "Azure.Batch.BatchNodeDisableSchedulingOption", + "client.models.StatusLevelTypes": "Azure.Batch.StatusLevelTypes", + "client.BatchClient.list_applications": "Client.BatchClient.listApplications", + "client.BatchClient.get_application": "Client.BatchClient.getApplication", + "client.BatchClient.list_pool_usage_metrics": "Client.BatchClient.listPoolUsageMetrics", + "client.BatchClient.create_pool": "Client.BatchClient.createPool", + "client.BatchClient.list_pools": "Client.BatchClient.listPools", + "client.BatchClient.delete_pool": "Client.BatchClient.deletePool", + "client.BatchClient.pool_exists": "Client.BatchClient.poolExists", + "client.BatchClient.get_pool": "Client.BatchClient.getPool", + "client.BatchClient.update_pool": "Client.BatchClient.updatePool", + "client.BatchClient.disable_pool_auto_scale": "Client.BatchClient.disablePoolAutoScale", + "client.BatchClient.enable_pool_auto_scale": "Client.BatchClient.enablePoolAutoScale", + "client.BatchClient.evaluate_pool_auto_scale": "Client.BatchClient.evaluatePoolAutoScale", + "client.BatchClient.resize_pool": "Client.BatchClient.resizePool", + "client.BatchClient.stop_pool_resize": "Client.BatchClient.stopPoolResize", + "client.BatchClient.replace_pool_properties": "Client.BatchClient.replacePoolProperties", + "client.BatchClient.remove_nodes": "Client.BatchClient.removeNodes", + "client.BatchClient.list_supported_images": "Client.BatchClient.listSupportedImages", + "client.BatchClient.list_pool_node_counts": "Client.BatchClient.listPoolNodeCounts", + "client.BatchClient.delete_job": "Client.BatchClient.deleteJob", + "client.BatchClient.get_job": 
"Client.BatchClient.getJob", + "client.BatchClient.update_job": "Client.BatchClient.updateJob", + "client.BatchClient.replace_job": "Client.BatchClient.replaceJob", + "client.BatchClient.disable_job": "Client.BatchClient.disableJob", + "client.BatchClient.enable_job": "Client.BatchClient.enableJob", + "client.BatchClient.terminate_job": "Client.BatchClient.terminateJob", + "client.BatchClient.create_job": "Client.BatchClient.createJob", + "client.BatchClient.list_jobs": "Client.BatchClient.listJobs", + "client.BatchClient.list_jobs_from_schedule": "Client.BatchClient.listJobsFromSchedule", + "client.BatchClient.list_job_preparation_and_release_task_status": "Client.BatchClient.listJobPreparationAndReleaseTaskStatus", + "client.BatchClient.get_job_task_counts": "Client.BatchClient.getJobTaskCounts", + "client.BatchClient.create_certificate": "Client.BatchClient.createCertificate", + "client.BatchClient.list_certificates": "Client.BatchClient.listCertificates", + "client.BatchClient.cancel_certificate_deletion": "Client.BatchClient.cancelCertificateDeletion", + "client.BatchClient.delete_certificate": "Client.BatchClient.deleteCertificate", + "client.BatchClient.get_certificate": "Client.BatchClient.getCertificate", + "client.BatchClient.job_schedule_exists": "Client.BatchClient.jobScheduleExists", + "client.BatchClient.delete_job_schedule": "Client.BatchClient.deleteJobSchedule", + "client.BatchClient.get_job_schedule": "Client.BatchClient.getJobSchedule", + "client.BatchClient.update_job_schedule": "Client.BatchClient.updateJobSchedule", + "client.BatchClient.replace_job_schedule": "Client.BatchClient.replaceJobSchedule", + "client.BatchClient.disable_job_schedule": "Client.BatchClient.disableJobSchedule", + "client.BatchClient.enable_job_schedule": "Client.BatchClient.enableJobSchedule", + "client.BatchClient.terminate_job_schedule": "Client.BatchClient.terminateJobSchedule", + "client.BatchClient.create_job_schedule": "Client.BatchClient.createJobSchedule", + 
"client.BatchClient.list_job_schedules": "Client.BatchClient.listJobSchedules", + "client.BatchClient.create_task": "Client.BatchClient.createTask", + "client.BatchClient.list_tasks": "Client.BatchClient.listTasks", + "client.BatchClient.create_task_collection": "Client.BatchClient.createTaskCollection", + "client.BatchClient.delete_task": "Client.BatchClient.deleteTask", + "client.BatchClient.get_task": "Client.BatchClient.getTask", + "client.BatchClient.replace_task": "Client.BatchClient.replaceTask", + "client.BatchClient.list_sub_tasks": "Client.BatchClient.listSubTasks", + "client.BatchClient.terminate_task": "Client.BatchClient.terminateTask", + "client.BatchClient.reactivate_task": "Client.BatchClient.reactivateTask", + "client.BatchClient.delete_task_file": "Client.BatchClient.deleteTaskFile", + "client.BatchClient.get_task_file": "Client.BatchClient.getTaskFile", + "client.BatchClient.get_task_file_properties": "Client.BatchClient.getTaskFileProperties", + "client.BatchClient.list_task_files": "Client.BatchClient.listTaskFiles", + "client.BatchClient.create_node_user": "Client.BatchClient.createNodeUser", + "client.BatchClient.delete_node_user": "Client.BatchClient.deleteNodeUser", + "client.BatchClient.replace_node_user": "Client.BatchClient.replaceNodeUser", + "client.BatchClient.get_node": "Client.BatchClient.getNode", + "client.BatchClient.reboot_node": "Client.BatchClient.rebootNode", + "client.BatchClient.start_node": "Client.BatchClient.startNode", + "client.BatchClient.reimage_node": "Client.BatchClient.reimageNode", + "client.BatchClient.deallocate_node": "Client.BatchClient.deallocateNode", + "client.BatchClient.disable_node_scheduling": "Client.BatchClient.disableNodeScheduling", + "client.BatchClient.enable_node_scheduling": "Client.BatchClient.enableNodeScheduling", + "client.BatchClient.get_node_remote_login_settings": "Client.BatchClient.getNodeRemoteLoginSettings", + "client.BatchClient.upload_node_logs": 
"Client.BatchClient.uploadNodeLogs", + "client.BatchClient.list_nodes": "Client.BatchClient.listNodes", + "client.BatchClient.get_node_extension": "Client.BatchClient.getNodeExtension", + "client.BatchClient.list_node_extensions": "Client.BatchClient.listNodeExtensions", + "client.BatchClient.delete_node_file": "Client.BatchClient.deleteNodeFile", + "client.BatchClient.get_node_file": "Client.BatchClient.getNodeFile", + "client.BatchClient.get_node_file_properties": "Client.BatchClient.getNodeFileProperties", + "client.BatchClient.list_node_files": "Client.BatchClient.listNodeFiles" + } +} \ No newline at end of file diff --git a/sdk/batch/azure-batch/azure/batch/__init__.py b/sdk/batch/azure-batch/azure/batch/__init__.py index a02f2bbd5c47..d55ccad1f573 100644 --- a/sdk/batch/azure-batch/azure/batch/__init__.py +++ b/sdk/batch/azure-batch/azure/batch/__init__.py @@ -1,32 +1 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) Python Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
-# -------------------------------------------------------------------------- -# pylint: disable=wrong-import-position - -from typing import TYPE_CHECKING - -if TYPE_CHECKING: - from ._patch import * # pylint: disable=unused-wildcard-import - -from ._client import BatchClient # type: ignore -from ._version import VERSION - -__version__ = VERSION - -try: - from ._patch import __all__ as _patch_all - from ._patch import * -except ImportError: - _patch_all = [] -from ._patch import patch_sdk as _patch_sdk - -__all__ = [ - "BatchClient", -] -__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore - -_patch_sdk() +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/batch/azure-batch/azure/batch/_operations/_patch.py b/sdk/batch/azure-batch/azure/batch/_operations/_patch.py index 4e0857b30791..d5af410f3a77 100644 --- a/sdk/batch/azure-batch/azure/batch/_operations/_patch.py +++ b/sdk/batch/azure-batch/azure/batch/_operations/_patch.py @@ -91,9 +91,7 @@ def create_tasks( # deque operations(append/pop) are thread-safe results_queue: Deque[_models.BatchTaskAddResult] = collections.deque() - task_workflow_manager = _TaskWorkflowManager( - self, job_id=job_id, task_collection=task_collection, **kwargs - ) + task_workflow_manager = _TaskWorkflowManager(self, job_id=job_id, task_collection=task_collection, **kwargs) # multi-threaded behavior if concurrencies: @@ -241,18 +239,19 @@ def get_node_file_properties( creation_time=headers["ocp-creation-time"], # content_type=headers["Content-Type"], # need to add to typespec file_mode=headers["ocp-batch-file-mode"], - ) - - get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal( # type: ignore - pool_id, - node_id, - file_path, - timeout=timeout, - ocpdate=ocpdate, - if_modified_since=if_modified_since, - if_unmodified_since=if_unmodified_since, + ) + + get_response: _models.BatchFileProperties = 
super()._get_node_file_properties_internal( # type: ignore + pool_id, + node_id, + file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, cls=cls, - **kwargs) + **kwargs + ) return get_response @@ -309,18 +308,19 @@ def get_task_file_properties( creation_time=headers["ocp-creation-time"], # content_type=headers["Content-Type"], # need to add to typespec file_mode=headers["ocp-batch-file-mode"], - ) - - get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal( # type: ignore - job_id, - task_id, - file_path, - timeout=timeout, - ocpdate=ocpdate, - if_modified_since=if_modified_since, - if_unmodified_since=if_unmodified_since, + ) + + get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal( # type: ignore + job_id, + task_id, + file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, cls=cls, - **kwargs) + **kwargs + ) return get_response @@ -523,9 +523,8 @@ def _bulk_add_tasks(self, results_queue, chunk_tasks_to_add): for task in chunk_tasks_to_add: if task.id == task_result.task_id: self.tasks_to_add.appendleft(task) - elif ( - task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR - and not (task_result.error and task_result.error.code == "TaskExists") + elif task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR and not ( + task_result.error and task_result.error.code == "TaskExists" ): # Client error will be recorded unless Task already exists self.failure_tasks.appendleft(task_result) diff --git a/sdk/batch/azure-batch/azure/batch/_patch.py b/sdk/batch/azure-batch/azure/batch/_patch.py index a9f1f6eeca2a..580a19463cac 100644 --- a/sdk/batch/azure-batch/azure/batch/_patch.py +++ b/sdk/batch/azure-batch/azure/batch/_patch.py @@ -146,7 +146,7 @@ class BatchClient(GenerateBatchClient): def __init__(self, endpoint: str, credential: 
Union[AzureNamedKeyCredential, TokenCredential], **kwargs): super().__init__( endpoint=endpoint, - credential=credential, # type: ignore + credential=credential, # type: ignore authentication_policy=kwargs.pop( "authentication_policy", self._format_shared_key_credential("", credential) ), diff --git a/sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py b/sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py index b3c12ac94cfb..a449b8db8a5d 100644 --- a/sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py +++ b/sdk/batch/azure-batch/azure/batch/aio/_operations/_patch.py @@ -89,9 +89,7 @@ async def create_tasks( kwargs.update({"timeout": timeout, "ocpdate": ocpdate}) results_queue: Deque[_models.BatchTaskAddResult] = collections.deque() - task_workflow_manager = _TaskWorkflowManager( - self, job_id=job_id, task_collection=task_collection, **kwargs - ) + task_workflow_manager = _TaskWorkflowManager(self, job_id=job_id, task_collection=task_collection, **kwargs) if concurrencies: if concurrencies < 0: @@ -230,18 +228,19 @@ async def get_node_file_properties( creation_time=headers["ocp-creation-time"], # content_type=headers["Content-Type"], # need to add to typespec file_mode=headers["ocp-batch-file-mode"], - ) - - get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal( # type: ignore - pool_id, - node_id, - file_path, - timeout=timeout, - ocpdate=ocpdate, - if_modified_since=if_modified_since, - if_unmodified_since=if_unmodified_since, + ) + + get_response: _models.BatchFileProperties = super()._get_node_file_properties_internal( # type: ignore + pool_id, + node_id, + file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, cls=cls, - **kwargs) + **kwargs + ) return get_response @@ -298,18 +297,19 @@ async def get_task_file_properties( creation_time=headers["ocp-creation-time"], # content_type=headers["Content-Type"], # need to add to typespec 
file_mode=headers["ocp-batch-file-mode"], - ) - - get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal( # type: ignore - job_id, - task_id, - file_path, - timeout=timeout, - ocpdate=ocpdate, - if_modified_since=if_modified_since, - if_unmodified_since=if_unmodified_since, + ) + + get_response: _models.BatchFileProperties = super()._get_task_file_properties_internal( # type: ignore + job_id, + task_id, + file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, cls=cls, - **kwargs) + **kwargs + ) return get_response @@ -473,7 +473,7 @@ async def _bulk_add_tasks( self.tasks_to_add.extendleft(chunk_tasks_to_add[midpoint:]) await self._bulk_add_tasks(results_queue, chunk_tasks_to_add[:midpoint]) # Retry server side errors - elif 500 <= e.response.status_code <= 599: # type: ignore + elif 500 <= e.response.status_code <= 599: # type: ignore self.tasks_to_add.extendleft(chunk_tasks_to_add) else: # Re-add to pending queue as unknown status / don't have result @@ -493,9 +493,8 @@ async def _bulk_add_tasks( for task in chunk_tasks_to_add: if task.id == task_result.task_id: self.tasks_to_add.appendleft(task) - elif ( - task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR - and not (task_result.error and task_result.error.code == "TaskExists") + elif task_result.status == _models.BatchTaskAddStatus.CLIENT_ERROR and not ( + task_result.error and task_result.error.code == "TaskExists" ): # Client error will be recorded unless Task already exists self.failure_tasks.appendleft(task_result) diff --git a/sdk/batch/azure-batch/azure/batch/aio/_patch.py b/sdk/batch/azure-batch/azure/batch/aio/_patch.py index 64a3f1262c22..082f6643f95a 100644 --- a/sdk/batch/azure-batch/azure/batch/aio/_patch.py +++ b/sdk/batch/azure-batch/azure/batch/aio/_patch.py @@ -41,7 +41,7 @@ class BatchClient(GenerateBatchClient): def __init__(self, endpoint: str, credential: 
Union[AzureNamedKeyCredential, TokenCredential], **kwargs): super().__init__( endpoint=endpoint, - credential=credential, # type: ignore + credential=credential, # type: ignore authentication_policy=kwargs.pop("authentication_policy", self._format_shared_key_credential(credential)), **kwargs ) diff --git a/sdk/batch/azure-batch/azure/batch/models/__init__.py b/sdk/batch/azure-batch/azure/batch/models/__init__.py index c54aed6d5845..393ee8aedeb0 100644 --- a/sdk/batch/azure-batch/azure/batch/models/__init__.py +++ b/sdk/batch/azure-batch/azure/batch/models/__init__.py @@ -123,7 +123,6 @@ ExitConditions, ExitOptions, FileProperties, - GetCertificateResponse, HttpHeader, ImageReference, InboundEndpoint, @@ -337,7 +336,6 @@ "ExitConditions", "ExitOptions", "FileProperties", - "GetCertificateResponse", "HttpHeader", "ImageReference", "InboundEndpoint", diff --git a/sdk/batch/azure-batch/azure/batch/models/_models.py b/sdk/batch/azure-batch/azure/batch/models/_models.py index 842decf5de5c..a90f7653330b 100644 --- a/sdk/batch/azure-batch/azure/batch/models/_models.py +++ b/sdk/batch/azure-batch/azure/batch/models/_models.py @@ -11,8 +11,8 @@ import datetime from typing import Any, Dict, List, Mapping, Optional, TYPE_CHECKING, Union, overload -from .. import _model_base -from .._model_base import rest_field +from ....client import _model_base +from ....client._model_base import rest_field if TYPE_CHECKING: from .. import models as _models @@ -62,7 +62,7 @@ class AuthenticationTokenSettings(_model_base.Model): grants access to a limited set of Batch service operations. Currently the only supported value for the access property is 'job', which grants access to all operations related to the Job which contains the Task. 
- :vartype access: list[str or ~azure.batch.models.AccessScope] + :vartype access: list[str or ~client.models.AccessScope] """ access: Optional[List[Union[str, "_models.AccessScope"]]] = rest_field( @@ -165,7 +165,7 @@ class AutoScaleRun(_model_base.Model): :vartype results: str :ivar error: Details of the error encountered evaluating the autoscale formula on the Pool, if the evaluation was unsuccessful. - :vartype error: ~azure.batch.models.AutoScaleRunError + :vartype error: ~client.models.AutoScaleRunError """ timestamp: datetime.datetime = rest_field( @@ -212,7 +212,7 @@ class AutoScaleRunError(_model_base.Model): a user interface. :vartype message: str :ivar values_property: A list of additional error details related to the autoscale error. - :vartype values_property: list[~azure.batch.models.NameValuePair] + :vartype values_property: list[~client.models.NameValuePair] """ code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -254,10 +254,10 @@ class AutoUserSpecification(_model_base.Model): For example, if the task mutates the registry in a way which could impact other tasks, or if certificates have been specified on the pool which should not be accessible by normal tasks but should be accessible by StartTasks. Known values are: "task" and "pool". - :vartype scope: str or ~azure.batch.models.AutoUserScope + :vartype scope: str or ~client.models.AutoUserScope :ivar elevation_level: The elevation level of the auto user. The default value is nonAdmin. Known values are: "nonadmin" and "admin". - :vartype elevation_level: str or ~azure.batch.models.ElevationLevel + :vartype elevation_level: str or ~client.models.ElevationLevel """ scope: Optional[Union[str, "_models.AutoUserScope"]] = rest_field( @@ -316,7 +316,7 @@ class AzureBlobFileSystemConfiguration(_model_base.Model): :ivar identity_reference: The reference to the user assigned identity to use to access containerName. 
This property is mutually exclusive with both accountKey and sasKey; exactly one must be specified. - :vartype identity_reference: ~azure.batch.models.BatchNodeIdentityReference + :vartype identity_reference: ~client.models.BatchNodeIdentityReference """ account_name: str = rest_field(name="accountName", visibility=["read", "create", "update", "delete", "query"]) @@ -527,7 +527,7 @@ class BatchAutoPoolSpecification(_model_base.Model): :vartype auto_pool_id_prefix: str :ivar pool_lifetime_option: The minimum lifetime of created auto Pools, and how multiple Jobs on a schedule are assigned to Pools. Required. Known values are: "jobschedule" and "job". - :vartype pool_lifetime_option: str or ~azure.batch.models.BatchPoolLifetimeOption + :vartype pool_lifetime_option: str or ~client.models.BatchPoolLifetimeOption :ivar keep_alive: Whether to keep an auto Pool alive after its lifetime expires. If false, the Batch service deletes the Pool once its lifetime (as determined by the poolLifetimeOption setting) expires; that is, when the Job or Job Schedule completes. If true, the Batch service @@ -535,7 +535,7 @@ class BatchAutoPoolSpecification(_model_base.Model): this option. :vartype keep_alive: bool :ivar pool: The Pool specification for the auto Pool. - :vartype pool: ~azure.batch.models.BatchPoolSpecification + :vartype pool: ~client.models.BatchPoolSpecification """ auto_pool_id_prefix: Optional[str] = rest_field( @@ -597,13 +597,13 @@ class BatchCertificate(_model_base.Model): :vartype url: str :ivar state: The state of the Certificate. Known values are: "active", "deleting", and "deletefailed". - :vartype state: str or ~azure.batch.models.BatchCertificateState + :vartype state: str or ~client.models.BatchCertificateState :ivar state_transition_time: The time at which the Certificate entered its current state. :vartype state_transition_time: ~datetime.datetime :ivar previous_state: The previous state of the Certificate. 
This property is not set if the Certificate is in its initial active state. Known values are: "active", "deleting", and "deletefailed". - :vartype previous_state: str or ~azure.batch.models.BatchCertificateState + :vartype previous_state: str or ~client.models.BatchCertificateState :ivar previous_state_transition_time: The time at which the Certificate entered its previous state. This property is not set if the Certificate is in its initial Active state. :vartype previous_state_transition_time: ~datetime.datetime @@ -611,12 +611,12 @@ class BatchCertificate(_model_base.Model): :vartype public_data: str :ivar delete_certificate_error: The error that occurred on the last attempt to delete this Certificate. This property is set only if the Certificate is in the DeleteFailed state. - :vartype delete_certificate_error: ~azure.batch.models.DeleteBatchCertificateError + :vartype delete_certificate_error: ~client.models.DeleteBatchCertificateError :ivar data: The base64-encoded contents of the Certificate. The maximum size is 10KB. Required. :vartype data: str :ivar certificate_format: The format of the Certificate data. Known values are: "pfx" and "cer". - :vartype certificate_format: str or ~azure.batch.models.BatchCertificateFormat + :vartype certificate_format: str or ~client.models.BatchCertificateFormat :ivar password: The password to access the Certificate's private key. This must be omitted if the Certificate format is cer. :vartype password: str @@ -706,7 +706,7 @@ class BatchCertificateReference(_model_base.Model): Certificates with visibility of 'remoteUser', a 'certs' directory is created in the user's home directory (e.g., /home/{user-name}/certs) and Certificates are placed in that directory. Known values are: "currentuser" and "localmachine". 
- :vartype store_location: str or ~azure.batch.models.BatchCertificateStoreLocation + :vartype store_location: str or ~client.models.BatchCertificateStoreLocation :ivar store_name: The name of the Certificate store on the Compute Node into which to install the Certificate. This property is applicable only for Pools configured with Windows Compute Nodes (that is, created with cloudServiceConfiguration, or with virtualMachineConfiguration @@ -717,7 +717,7 @@ class BatchCertificateReference(_model_base.Model): :ivar visibility: Which user Accounts on the Compute Node should have access to the private data of the Certificate. You can specify more than one visibility in this collection. The default is all Accounts. - :vartype visibility: list[str or ~azure.batch.models.BatchCertificateVisibility] + :vartype visibility: list[str or ~client.models.BatchCertificateVisibility] """ thumbprint: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -782,10 +782,10 @@ class BatchError(_model_base.Model): :vartype code: str :ivar message: A message describing the error, intended to be suitable for display in a user interface. - :vartype message: ~azure.batch.models.BatchErrorMessage + :vartype message: ~client.models.BatchErrorMessage :ivar values_property: A collection of key-value pairs containing additional details about the error. - :vartype values_property: list[~azure.batch.models.BatchErrorDetail] + :vartype values_property: list[~client.models.BatchErrorDetail] """ code: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -913,13 +913,13 @@ class BatchJob(_model_base.Model): :vartype creation_time: ~datetime.datetime :ivar state: The current state of the Job. Known values are: "active", "disabling", "disabled", "enabling", "terminating", "completed", and "deleting". 
- :vartype state: str or ~azure.batch.models.BatchJobState + :vartype state: str or ~client.models.BatchJobState :ivar state_transition_time: The time at which the Job entered its current state. :vartype state_transition_time: ~datetime.datetime :ivar previous_state: The previous state of the Job. This property is not set if the Job is in its initial Active state. Known values are: "active", "disabling", "disabled", "enabling", "terminating", "completed", and "deleting". - :vartype previous_state: str or ~azure.batch.models.BatchJobState + :vartype previous_state: str or ~client.models.BatchJobState :ivar previous_state_transition_time: The time at which the Job entered its previous state. This property is not set if the Job is in its initial Active state. :vartype previous_state_transition_time: ~datetime.datetime @@ -938,44 +938,44 @@ class BatchJob(_model_base.Model): API. :vartype max_parallel_tasks: int :ivar constraints: The execution constraints for the Job. - :vartype constraints: ~azure.batch.models.BatchJobConstraints + :vartype constraints: ~client.models.BatchJobConstraints :ivar job_manager_task: Details of a Job Manager Task to be launched when the Job is started. - :vartype job_manager_task: ~azure.batch.models.BatchJobManagerTask + :vartype job_manager_task: ~client.models.BatchJobManagerTask :ivar job_preparation_task: The Job Preparation Task. The Job Preparation Task is a special Task run on each Compute Node before any other Task of the Job. - :vartype job_preparation_task: ~azure.batch.models.BatchJobPreparationTask + :vartype job_preparation_task: ~client.models.BatchJobPreparationTask :ivar job_release_task: The Job Release Task. The Job Release Task is a special Task run at the end of the Job on each Compute Node that has run any other Task of the Job. 
- :vartype job_release_task: ~azure.batch.models.BatchJobReleaseTask + :vartype job_release_task: ~client.models.BatchJobReleaseTask :ivar common_environment_settings: The list of common environment variable settings. These environment variables are set for all Tasks in the Job (including the Job Manager, Job Preparation and Job Release Tasks). Individual Tasks can override an environment setting specified here by specifying the same setting name with a different value. - :vartype common_environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype common_environment_settings: list[~client.models.EnvironmentSetting] :ivar pool_info: The Pool settings associated with the Job. Required. - :vartype pool_info: ~azure.batch.models.BatchPoolInfo + :vartype pool_info: ~client.models.BatchPoolInfo :ivar on_all_tasks_complete: The action the Batch service should take when all Tasks in the Job are in the completed state. The default is noaction. Known values are: "noaction" and "terminatejob". - :vartype on_all_tasks_complete: str or ~azure.batch.models.OnAllBatchTasksComplete + :vartype on_all_tasks_complete: str or ~client.models.OnAllBatchTasksComplete :ivar on_task_failure: The action the Batch service should take when any Task in the Job fails. A Task is considered to have failed if has a failureInfo. A failureInfo is set if the Task completes with a non-zero exit code after exhausting its retry count, or if there was an error starting the Task, for example due to a resource file download error. The default is noaction. Known values are: "noaction" and "performexitoptionsjobaction". - :vartype on_task_failure: str or ~azure.batch.models.OnBatchTaskFailure + :vartype on_task_failure: str or ~client.models.OnBatchTaskFailure :ivar network_configuration: The network configuration for the Job. 
- :vartype network_configuration: ~azure.batch.models.BatchJobNetworkConfiguration + :vartype network_configuration: ~client.models.BatchJobNetworkConfiguration :ivar metadata: A list of name-value pairs associated with the Job as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] :ivar execution_info: The execution information for the Job. - :vartype execution_info: ~azure.batch.models.BatchJobExecutionInfo + :vartype execution_info: ~client.models.BatchJobExecutionInfo :ivar stats: Resource usage statistics for the entire lifetime of the Job. This property is populated only if the BatchJob was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. - :vartype stats: ~azure.batch.models.BatchJobStatistics + :vartype stats: ~client.models.BatchJobStatistics """ id: Optional[str] = rest_field(visibility=["read"]) @@ -1198,7 +1198,7 @@ class BatchJobCreateContent(_model_base.Model): API. :vartype max_parallel_tasks: int :ivar constraints: The execution constraints for the Job. - :vartype constraints: ~azure.batch.models.BatchJobConstraints + :vartype constraints: ~client.models.BatchJobConstraints :ivar job_manager_task: Details of a Job Manager Task to be launched when the Job is started. If the Job does not specify a Job Manager Task, the user must explicitly add Tasks to the Job. If the Job does specify a Job Manager Task, the Batch service creates the Job Manager Task when @@ -1209,25 +1209,25 @@ class BatchJobCreateContent(_model_base.Model): Task in the system and perform whatever actions are required for the Job.) 
For example, a Job Manager Task might download a file specified as a parameter, analyze the contents of that file and submit additional Tasks based on those contents. - :vartype job_manager_task: ~azure.batch.models.BatchJobManagerTask + :vartype job_manager_task: ~client.models.BatchJobManagerTask :ivar job_preparation_task: The Job Preparation Task. If a Job has a Job Preparation Task, the Batch service will run the Job Preparation Task on a Node before starting any Tasks of that Job on that Compute Node. - :vartype job_preparation_task: ~azure.batch.models.BatchJobPreparationTask + :vartype job_preparation_task: ~client.models.BatchJobPreparationTask :ivar job_release_task: The Job Release Task. A Job Release Task cannot be specified without also specifying a Job Preparation Task for the Job. The Batch service runs the Job Release Task on the Nodes that have run the Job Preparation Task. The primary purpose of the Job Release Task is to undo changes to Compute Nodes made by the Job Preparation Task. Example activities include deleting local files, or shutting down services that were started as part of Job preparation. - :vartype job_release_task: ~azure.batch.models.BatchJobReleaseTask + :vartype job_release_task: ~client.models.BatchJobReleaseTask :ivar common_environment_settings: The list of common environment variable settings. These environment variables are set for all Tasks in the Job (including the Job Manager, Job Preparation and Job Release Tasks). Individual Tasks can override an environment setting specified here by specifying the same setting name with a different value. - :vartype common_environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype common_environment_settings: list[~client.models.EnvironmentSetting] :ivar pool_info: The Pool on which the Batch service runs the Job's Tasks. Required. 
- :vartype pool_info: ~azure.batch.models.BatchPoolInfo + :vartype pool_info: ~client.models.BatchPoolInfo :ivar on_all_tasks_complete: The action the Batch service should take when all Tasks in the Job are in the completed state. Note that if a Job contains no Tasks, then all Tasks are considered complete. This option is therefore most commonly used with a Job Manager task; if you want to @@ -1235,18 +1235,18 @@ class BatchJobCreateContent(_model_base.Model): onAllTasksComplete to noaction and update the Job properties to set onAllTasksComplete to terminatejob once you have finished adding Tasks. The default is noaction. Known values are: "noaction" and "terminatejob". - :vartype on_all_tasks_complete: str or ~azure.batch.models.OnAllBatchTasksComplete + :vartype on_all_tasks_complete: str or ~client.models.OnAllBatchTasksComplete :ivar on_task_failure: The action the Batch service should take when any Task in the Job fails. A Task is considered to have failed if has a failureInfo. A failureInfo is set if the Task completes with a non-zero exit code after exhausting its retry count, or if there was an error starting the Task, for example due to a resource file download error. The default is noaction. Known values are: "noaction" and "performexitoptionsjobaction". - :vartype on_task_failure: str or ~azure.batch.models.OnBatchTaskFailure + :vartype on_task_failure: str or ~client.models.OnBatchTaskFailure :ivar network_configuration: The network configuration for the Job. - :vartype network_configuration: ~azure.batch.models.BatchJobNetworkConfiguration + :vartype network_configuration: ~client.models.BatchJobNetworkConfiguration :ivar metadata: A list of name-value pairs associated with the Job as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. 
- :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] """ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -1386,7 +1386,7 @@ class BatchJobDisableContent(_model_base.Model): :ivar disable_tasks: What to do with active Tasks associated with the Job. Required. Known values are: "requeue", "terminate", and "wait". - :vartype disable_tasks: str or ~azure.batch.models.DisableBatchJobOption + :vartype disable_tasks: str or ~client.models.DisableBatchJobOption """ disable_tasks: Union[str, "_models.DisableBatchJobOption"] = rest_field( @@ -1431,7 +1431,7 @@ class BatchJobExecutionInfo(_model_base.Model): :vartype pool_id: str :ivar scheduling_error: Details of any error encountered by the service in starting the Job. This property is not set if there was no error starting the Job. - :vartype scheduling_error: ~azure.batch.models.BatchJobSchedulingError + :vartype scheduling_error: ~client.models.BatchJobSchedulingError :ivar termination_reason: A string describing the reason the Job ended. This property is set only if the Job is in the completed state. If the Batch service terminates the Job, it sets the reason as follows: JMComplete - the Job Manager Task completed, and killJobOnCompletion was set @@ -1551,22 +1551,22 @@ class BatchJobManagerTask(_model_base.Model): the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. - :vartype container_settings: ~azure.batch.models.BatchTaskContainerSettings + :vartype container_settings: ~client.models.BatchTaskContainerSettings :ivar resource_files: A list of files that the Batch service will download to the Compute Node before running the command line. Files listed under this element are located in the Task's working directory. There is a maximum size for the list of resource files. 
When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. - :vartype resource_files: list[~azure.batch.models.ResourceFile] + :vartype resource_files: list[~client.models.ResourceFile] :ivar output_files: A list of files that the Batch service will upload from the Compute Node after running the command line. For multi-instance Tasks, the files will only be uploaded from the Compute Node on which the primary Task is executed. - :vartype output_files: list[~azure.batch.models.OutputFile] + :vartype output_files: list[~client.models.OutputFile] :ivar environment_settings: A list of environment variable settings for the Job Manager Task. - :vartype environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype environment_settings: list[~client.models.EnvironmentSetting] :ivar constraints: Constraints that apply to the Job Manager Task. - :vartype constraints: ~azure.batch.models.BatchTaskConstraints + :vartype constraints: ~client.models.BatchTaskConstraints :ivar required_slots: The number of scheduling slots that the Task requires to run. The default is 1. A Task can only be scheduled to run on a compute node if the node has enough free scheduling slots available. For multi-instance Tasks, this property is not supported and must @@ -1585,7 +1585,7 @@ class BatchJobManagerTask(_model_base.Model): :vartype kill_job_on_completion: bool :ivar user_identity: The user identity under which the Job Manager Task runs. If omitted, the Task runs as a non-administrative user unique to the Task. - :vartype user_identity: ~azure.batch.models.UserIdentity + :vartype user_identity: ~client.models.UserIdentity :ivar run_exclusive: Whether the Job Manager Task requires exclusive use of the Compute Node where it runs. 
If true, no other Tasks will run on the same Node for as long as the Job Manager is running. If false, other Tasks can run simultaneously with the Job Manager on a Compute @@ -1602,8 +1602,7 @@ class BatchJobManagerTask(_model_base.Model): the existing copy on the Compute Node is used. If a referenced Application Package cannot be installed, for example because the package has been deleted or because download failed, the Task fails. - :vartype application_package_references: - list[~azure.batch.models.BatchApplicationPackageReference] + :vartype application_package_references: list[~client.models.BatchApplicationPackageReference] :ivar authentication_token_settings: The settings for an authentication token that the Task can use to perform Batch service operations. If this property is set, the Batch service provides the Task with an authentication token which can be used to authenticate Batch service @@ -1612,7 +1611,7 @@ class BatchJobManagerTask(_model_base.Model): using the token depend on the settings. For example, a Task can request Job permissions in order to add other Tasks to the Job, or check the status of the Job or of other Tasks under the Job. - :vartype authentication_token_settings: ~azure.batch.models.AuthenticationTokenSettings + :vartype authentication_token_settings: ~client.models.AuthenticationTokenSettings :ivar allow_low_priority_node: Whether the Job Manager Task may run on a Spot/Low-priority Compute Node. The default value is true. :vartype allow_low_priority_node: bool @@ -1851,11 +1850,11 @@ class BatchJobPreparationAndReleaseTaskStatus(_model_base.Model): :ivar job_preparation_task_execution_info: Information about the execution status of the Job Preparation Task on this Compute Node. 
:vartype job_preparation_task_execution_info: - ~azure.batch.models.BatchJobPreparationTaskExecutionInfo + ~client.models.BatchJobPreparationTaskExecutionInfo :ivar job_release_task_execution_info: Information about the execution status of the Job Release Task on this Compute Node. This property is set only if the Job Release Task has run on the Compute Node. - :vartype job_release_task_execution_info: ~azure.batch.models.BatchJobReleaseTaskExecutionInfo + :vartype job_release_task_execution_info: ~client.models.BatchJobReleaseTaskExecutionInfo """ pool_id: Optional[str] = rest_field(name="poolId", visibility=["read", "create", "update", "delete", "query"]) @@ -1949,19 +1948,19 @@ class BatchJobPreparationTask(_model_base.Model): the container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. - :vartype container_settings: ~azure.batch.models.BatchTaskContainerSettings + :vartype container_settings: ~client.models.BatchTaskContainerSettings :ivar resource_files: A list of files that the Batch service will download to the Compute Node before running the command line. Files listed under this element are located in the Task's working directory. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. - :vartype resource_files: list[~azure.batch.models.ResourceFile] + :vartype resource_files: list[~client.models.ResourceFile] :ivar environment_settings: A list of environment variable settings for the Job Preparation Task. 
- :vartype environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype environment_settings: list[~client.models.EnvironmentSetting] :ivar constraints: Constraints that apply to the Job Preparation Task. - :vartype constraints: ~azure.batch.models.BatchTaskConstraints + :vartype constraints: ~client.models.BatchTaskConstraints :ivar wait_for_success: Whether the Batch service should wait for the Job Preparation Task to complete successfully before scheduling any other Tasks of the Job on the Compute Node. A Job Preparation Task has completed successfully if it exits with exit code 0. If true and the Job @@ -1977,7 +1976,7 @@ class BatchJobPreparationTask(_model_base.Model): :ivar user_identity: The user identity under which the Job Preparation Task runs. If omitted, the Task runs as a non-administrative user unique to the Task on Windows Compute Nodes, or a non-administrative user unique to the Pool on Linux Compute Nodes. - :vartype user_identity: ~azure.batch.models.UserIdentity + :vartype user_identity: ~client.models.UserIdentity :ivar rerun_on_node_reboot_after_success: Whether the Batch service should rerun the Job Preparation Task after a Compute Node reboots. The Job Preparation Task is always rerun if a Compute Node is reimaged, or if the Job Preparation Task did not complete (e.g. because the @@ -2098,7 +2097,7 @@ class BatchJobPreparationTaskExecutionInfo(_model_base.Model): :vartype end_time: ~datetime.datetime :ivar state: The current state of the Job Preparation Task on the Compute Node. Required. Known values are: "running" and "completed". - :vartype state: str or ~azure.batch.models.BatchJobPreparationTaskState + :vartype state: str or ~client.models.BatchJobPreparationTaskState :ivar task_root_directory: The root directory of the Job Preparation Task on the Compute Node. You can use this path to retrieve files created by the Task, such as log files. 
:vartype task_root_directory: str @@ -2114,10 +2113,10 @@ class BatchJobPreparationTaskExecutionInfo(_model_base.Model): :vartype exit_code: int :ivar container_info: Information about the container under which the Task is executing. This property is set only if the Task runs in a container context. - :vartype container_info: ~azure.batch.models.BatchTaskContainerExecutionInfo + :vartype container_info: ~client.models.BatchTaskContainerExecutionInfo :ivar failure_info: Information describing the Task failure, if any. This property is set only if the Task is in the completed state and encountered a failure. - :vartype failure_info: ~azure.batch.models.BatchTaskFailureInfo + :vartype failure_info: ~client.models.BatchTaskFailureInfo :ivar retry_count: The number of times the Task has been retried by the Batch service. Task application failures (non-zero exit code) are retried, pre-processing errors (the Task could not be run) and file upload errors are not retried. The Batch service will retry the Task up to @@ -2135,7 +2134,7 @@ class BatchJobPreparationTaskExecutionInfo(_model_base.Model): :ivar result: The result of the Task execution. If the value is 'failed', then the details of the failure can be found in the failureInfo property. Known values are: "success" and "failure". - :vartype result: str or ~azure.batch.models.BatchTaskExecutionResult + :vartype result: str or ~client.models.BatchTaskExecutionResult """ start_time: datetime.datetime = rest_field( @@ -2271,16 +2270,16 @@ class BatchJobReleaseTask(_model_base.Model): container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. 
- :vartype container_settings: ~azure.batch.models.BatchTaskContainerSettings + :vartype container_settings: ~client.models.BatchTaskContainerSettings :ivar resource_files: A list of files that the Batch service will download to the Compute Node before running the command line. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. Files listed under this element are located in the Task's working directory. - :vartype resource_files: list[~azure.batch.models.ResourceFile] + :vartype resource_files: list[~client.models.ResourceFile] :ivar environment_settings: A list of environment variable settings for the Job Release Task. - :vartype environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype environment_settings: list[~client.models.EnvironmentSetting] :ivar max_wall_clock_time: The maximum elapsed time that the Job Release Task may run on a given Compute Node, measured from the time the Task starts. If the Task does not complete within the time limit, the Batch service terminates it. The default value is 15 minutes. You @@ -2294,7 +2293,7 @@ class BatchJobReleaseTask(_model_base.Model): :vartype retention_time: ~datetime.timedelta :ivar user_identity: The user identity under which the Job Release Task runs. If omitted, the Task runs as a non-administrative user unique to the Task. - :vartype user_identity: ~azure.batch.models.UserIdentity + :vartype user_identity: ~client.models.UserIdentity """ id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -2395,7 +2394,7 @@ class BatchJobReleaseTaskExecutionInfo(_model_base.Model): :vartype end_time: ~datetime.datetime :ivar state: The current state of the Job Release Task on the Compute Node. 
Required. Known values are: "running" and "completed". - :vartype state: str or ~azure.batch.models.BatchJobReleaseTaskState + :vartype state: str or ~client.models.BatchJobReleaseTaskState :ivar task_root_directory: The root directory of the Job Release Task on the Compute Node. You can use this path to retrieve files created by the Task, such as log files. :vartype task_root_directory: str @@ -2411,14 +2410,14 @@ class BatchJobReleaseTaskExecutionInfo(_model_base.Model): :vartype exit_code: int :ivar container_info: Information about the container under which the Task is executing. This property is set only if the Task runs in a container context. - :vartype container_info: ~azure.batch.models.BatchTaskContainerExecutionInfo + :vartype container_info: ~client.models.BatchTaskContainerExecutionInfo :ivar failure_info: Information describing the Task failure, if any. This property is set only if the Task is in the completed state and encountered a failure. - :vartype failure_info: ~azure.batch.models.BatchTaskFailureInfo + :vartype failure_info: ~client.models.BatchTaskFailureInfo :ivar result: The result of the Task execution. If the value is 'failed', then the details of the failure can be found in the failureInfo property. Known values are: "success" and "failure". - :vartype result: str or ~azure.batch.models.BatchTaskExecutionResult + :vartype result: str or ~client.models.BatchTaskExecutionResult """ start_time: datetime.datetime = rest_field( @@ -2517,31 +2516,31 @@ class BatchJobSchedule(_model_base.Model): :vartype creation_time: ~datetime.datetime :ivar state: The current state of the Job Schedule. Known values are: "active", "completed", "disabled", "terminating", and "deleting". - :vartype state: str or ~azure.batch.models.BatchJobScheduleState + :vartype state: str or ~client.models.BatchJobScheduleState :ivar state_transition_time: The time at which the Job Schedule entered the current state. 
:vartype state_transition_time: ~datetime.datetime :ivar previous_state: The previous state of the Job Schedule. This property is not present if the Job Schedule is in its initial active state. Known values are: "active", "completed", "disabled", "terminating", and "deleting". - :vartype previous_state: str or ~azure.batch.models.BatchJobScheduleState + :vartype previous_state: str or ~client.models.BatchJobScheduleState :ivar previous_state_transition_time: The time at which the Job Schedule entered its previous state. This property is not present if the Job Schedule is in its initial active state. :vartype previous_state_transition_time: ~datetime.datetime :ivar schedule: The schedule according to which Jobs will be created. All times are fixed respective to UTC and are not impacted by daylight saving time. - :vartype schedule: ~azure.batch.models.BatchJobScheduleConfiguration + :vartype schedule: ~client.models.BatchJobScheduleConfiguration :ivar job_specification: The details of the Jobs to be created on this schedule. Required. - :vartype job_specification: ~azure.batch.models.BatchJobSpecification + :vartype job_specification: ~client.models.BatchJobSpecification :ivar execution_info: Information about Jobs that have been and will be run under this schedule. - :vartype execution_info: ~azure.batch.models.BatchJobScheduleExecutionInfo + :vartype execution_info: ~client.models.BatchJobScheduleExecutionInfo :ivar metadata: A list of name-value pairs associated with the schedule as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] :ivar stats: The lifetime resource usage statistics for the Job Schedule. The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. 
- :vartype stats: ~azure.batch.models.BatchJobScheduleStatistics + :vartype stats: ~client.models.BatchJobScheduleStatistics """ id: Optional[str] = rest_field(visibility=["read"]) @@ -2739,12 +2738,12 @@ class BatchJobScheduleCreateContent(_model_base.Model): :vartype display_name: str :ivar schedule: The schedule according to which Jobs will be created. All times are fixed respective to UTC and are not impacted by daylight saving time. Required. - :vartype schedule: ~azure.batch.models.BatchJobScheduleConfiguration + :vartype schedule: ~client.models.BatchJobScheduleConfiguration :ivar job_specification: The details of the Jobs to be created on this schedule. Required. - :vartype job_specification: ~azure.batch.models.BatchJobSpecification + :vartype job_specification: ~client.models.BatchJobSpecification :ivar metadata: A list of name-value pairs associated with the schedule as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] """ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -2805,7 +2804,7 @@ class BatchJobScheduleExecutionInfo(_model_base.Model): :vartype next_run_time: ~datetime.datetime :ivar recent_job: Information about the most recent Job under the Job Schedule. This property is present only if the at least one Job has run under the schedule. - :vartype recent_job: ~azure.batch.models.RecentBatchJob + :vartype recent_job: ~client.models.RecentBatchJob :ivar end_time: The time at which the schedule ended. This property is set only if the Job Schedule is in the completed state. :vartype end_time: ~datetime.datetime @@ -3009,14 +3008,14 @@ class BatchJobScheduleUpdateContent(_model_base.Model): :ivar schedule: The schedule according to which Jobs will be created. All times are fixed respective to UTC and are not impacted by daylight saving time. 
If you do not specify this element, the existing schedule is left unchanged. - :vartype schedule: ~azure.batch.models.BatchJobScheduleConfiguration + :vartype schedule: ~client.models.BatchJobScheduleConfiguration :ivar job_specification: The details of the Jobs to be created on this schedule. Updates affect only Jobs that are started after the update has taken place. Any currently active Job continues with the older specification. - :vartype job_specification: ~azure.batch.models.BatchJobSpecification + :vartype job_specification: ~client.models.BatchJobSpecification :ivar metadata: A list of name-value pairs associated with the Job Schedule as metadata. If you do not specify this element, existing metadata is left unchanged. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] """ schedule: Optional["_models.BatchJobScheduleConfiguration"] = rest_field( @@ -3062,7 +3061,7 @@ class BatchJobSchedulingError(_model_base.Model): :ivar category: The category of the Job scheduling error. Required. Known values are: "usererror" and "servererror". - :vartype category: str or ~azure.batch.models.ErrorCategory + :vartype category: str or ~client.models.ErrorCategory :ivar code: An identifier for the Job scheduling error. Codes are invariant and are intended to be consumed programmatically. :vartype code: str @@ -3070,7 +3069,7 @@ class BatchJobSchedulingError(_model_base.Model): display in a user interface. :vartype message: str :ivar details: A list of additional error details related to the scheduling error. - :vartype details: list[~azure.batch.models.NameValuePair] + :vartype details: list[~client.models.NameValuePair] """ category: Union[str, "_models.ErrorCategory"] = rest_field( @@ -3143,47 +3142,47 @@ class BatchJobSpecification(_model_base.Model): initially set onAllTasksComplete to noaction and update the Job properties to set onAllTasksComplete to terminatejob once you have finished adding Tasks. 
The default is noaction. Known values are: "noaction" and "terminatejob". - :vartype on_all_tasks_complete: str or ~azure.batch.models.OnAllBatchTasksComplete + :vartype on_all_tasks_complete: str or ~client.models.OnAllBatchTasksComplete :ivar on_task_failure: The action the Batch service should take when any Task fails in a Job created under this schedule. A Task is considered to have failed if it have failed if has a failureInfo. A failureInfo is set if the Task completes with a non-zero exit code after exhausting its retry count, or if there was an error starting the Task, for example due to a resource file download error. The default is noaction. Known values are: "noaction" and "performexitoptionsjobaction". - :vartype on_task_failure: str or ~azure.batch.models.OnBatchTaskFailure + :vartype on_task_failure: str or ~client.models.OnBatchTaskFailure :ivar network_configuration: The network configuration for the Job. - :vartype network_configuration: ~azure.batch.models.BatchJobNetworkConfiguration + :vartype network_configuration: ~client.models.BatchJobNetworkConfiguration :ivar constraints: The execution constraints for Jobs created under this schedule. - :vartype constraints: ~azure.batch.models.BatchJobConstraints + :vartype constraints: ~client.models.BatchJobConstraints :ivar job_manager_task: The details of a Job Manager Task to be launched when a Job is started under this schedule. If the Job does not specify a Job Manager Task, the user must explicitly add Tasks to the Job using the Task API. If the Job does specify a Job Manager Task, the Batch service creates the Job Manager Task when the Job is created, and will try to schedule the Job Manager Task before scheduling other Tasks in the Job. - :vartype job_manager_task: ~azure.batch.models.BatchJobManagerTask + :vartype job_manager_task: ~client.models.BatchJobManagerTask :ivar job_preparation_task: The Job Preparation Task for Jobs created under this schedule. 
If a Job has a Job Preparation Task, the Batch service will run the Job Preparation Task on a Node before starting any Tasks of that Job on that Compute Node. - :vartype job_preparation_task: ~azure.batch.models.BatchJobPreparationTask + :vartype job_preparation_task: ~client.models.BatchJobPreparationTask :ivar job_release_task: The Job Release Task for Jobs created under this schedule. The primary purpose of the Job Release Task is to undo changes to Nodes made by the Job Preparation Task. Example activities include deleting local files, or shutting down services that were started as part of Job preparation. A Job Release Task cannot be specified without also specifying a Job Preparation Task for the Job. The Batch service runs the Job Release Task on the Compute Nodes that have run the Job Preparation Task. - :vartype job_release_task: ~azure.batch.models.BatchJobReleaseTask + :vartype job_release_task: ~client.models.BatchJobReleaseTask :ivar common_environment_settings: A list of common environment variable settings. These environment variables are set for all Tasks in Jobs created under this schedule (including the Job Manager, Job Preparation and Job Release Tasks). Individual Tasks can override an environment setting specified here by specifying the same setting name with a different value. - :vartype common_environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype common_environment_settings: list[~client.models.EnvironmentSetting] :ivar pool_info: The Pool on which the Batch service runs the Tasks of Jobs created under this schedule. Required. - :vartype pool_info: ~azure.batch.models.BatchPoolInfo + :vartype pool_info: ~client.models.BatchPoolInfo :ivar metadata: A list of name-value pairs associated with each Job created under this schedule as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. 
- :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] """ priority: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3511,26 +3510,26 @@ class BatchJobUpdateContent(_model_base.Model): :vartype max_parallel_tasks: int :ivar constraints: The execution constraints for the Job. If omitted, the existing execution constraints are left unchanged. - :vartype constraints: ~azure.batch.models.BatchJobConstraints + :vartype constraints: ~client.models.BatchJobConstraints :ivar pool_info: The Pool on which the Batch service runs the Job's Tasks. You may change the Pool for a Job only when the Job is disabled. The Patch Job call will fail if you include the poolInfo element and the Job is not disabled. If you specify an autoPoolSpecification in the poolInfo, only the keepAlive property of the autoPoolSpecification can be updated, and then only if the autoPoolSpecification has a poolLifetimeOption of Job (other job properties can be updated as normal). If omitted, the Job continues to run on its current Pool. - :vartype pool_info: ~azure.batch.models.BatchPoolInfo + :vartype pool_info: ~client.models.BatchPoolInfo :ivar on_all_tasks_complete: The action the Batch service should take when all Tasks in the Job are in the completed state. If omitted, the completion behavior is left unchanged. You may not change the value from terminatejob to noaction - that is, once you have engaged automatic Job termination, you cannot turn it off again. If you try to do this, the request fails with an 'invalid property value' error response; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). Known values are: "noaction" and "terminatejob". 
- :vartype on_all_tasks_complete: str or ~azure.batch.models.OnAllBatchTasksComplete + :vartype on_all_tasks_complete: str or ~client.models.OnAllBatchTasksComplete :ivar metadata: A list of name-value pairs associated with the Job as metadata. If omitted, the existing Job metadata is left unchanged. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] :ivar network_configuration: The network configuration for the Job. - :vartype network_configuration: ~azure.batch.models.BatchJobNetworkConfiguration + :vartype network_configuration: ~client.models.BatchJobNetworkConfiguration """ priority: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -3624,10 +3623,10 @@ class BatchNode(_model_base.Model): "reimaging", "running", "unusable", "creating", "starting", "waitingforstarttask", "starttaskfailed", "unknown", "leavingpool", "offline", "preempted", "upgradingos", "deallocated", and "deallocating". - :vartype state: str or ~azure.batch.models.BatchNodeState + :vartype state: str or ~client.models.BatchNodeState :ivar scheduling_state: Whether the Compute Node is available for Task scheduling. Known values are: "enabled" and "disabled". - :vartype scheduling_state: str or ~azure.batch.models.SchedulingState + :vartype scheduling_state: str or ~client.models.SchedulingState :ivar state_transition_time: The time at which the Compute Node entered its current state. :vartype state_transition_time: ~datetime.datetime :ivar last_boot_time: The last time at which the Compute Node was started. This property may @@ -3670,12 +3669,12 @@ class BatchNode(_model_base.Model): :vartype total_tasks_succeeded: int :ivar recent_tasks: A list of Tasks whose state has recently changed. This property is present only if at least one Task has run on this Compute Node since it was assigned to the Pool. 
- :vartype recent_tasks: list[~azure.batch.models.BatchTaskInfo] + :vartype recent_tasks: list[~client.models.BatchTaskInfo] :ivar start_task: The Task specified to run on the Compute Node as it joins the Pool. - :vartype start_task: ~azure.batch.models.BatchStartTask + :vartype start_task: ~client.models.BatchStartTask :ivar start_task_info: Runtime information about the execution of the StartTask on the Compute Node. - :vartype start_task_info: ~azure.batch.models.BatchStartTaskInfo + :vartype start_task_info: ~client.models.BatchStartTaskInfo :ivar certificate_references: For Windows Nodes, the Batch service installs the Certificates to the specified Certificate store and location. For Linux Compute Nodes, the Certificates are stored in a directory inside the Task working @@ -3686,19 +3685,19 @@ class BatchNode(_model_base.Model): Warning: This property is deprecated and will be removed after February, 2024. Please use the `Azure KeyVault Extension `_ instead. - :vartype certificate_references: list[~azure.batch.models.BatchCertificateReference] + :vartype certificate_references: list[~client.models.BatchCertificateReference] :ivar errors: The list of errors that are currently being encountered by the Compute Node. - :vartype errors: list[~azure.batch.models.BatchNodeError] + :vartype errors: list[~client.models.BatchNodeError] :ivar is_dedicated: Whether this Compute Node is a dedicated Compute Node. If false, the Compute Node is a Spot/Low-priority Compute Node. :vartype is_dedicated: bool :ivar endpoint_configuration: The endpoint configuration for the Compute Node. - :vartype endpoint_configuration: ~azure.batch.models.BatchNodeEndpointConfiguration + :vartype endpoint_configuration: ~client.models.BatchNodeEndpointConfiguration :ivar node_agent_info: Information about the Compute Node agent version and the time the Compute Node upgraded to a new version. 
- :vartype node_agent_info: ~azure.batch.models.BatchNodeAgentInfo + :vartype node_agent_info: ~client.models.BatchNodeAgentInfo :ivar virtual_machine_info: Info about the current state of the virtual machine. - :vartype virtual_machine_info: ~azure.batch.models.VirtualMachineInfo + :vartype virtual_machine_info: ~client.models.VirtualMachineInfo """ id: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -4027,7 +4026,7 @@ class BatchNodeDeallocateContent(_model_base.Model): :ivar node_deallocate_option: When to deallocate the Compute Node and what to do with currently running Tasks. The default value is requeue. Known values are: "requeue", "terminate", "taskcompletion", and "retaineddata". - :vartype node_deallocate_option: str or ~azure.batch.models.BatchNodeDeallocateOption + :vartype node_deallocate_option: str or ~client.models.BatchNodeDeallocateOption """ node_deallocate_option: Optional[Union[str, "_models.BatchNodeDeallocateOption"]] = rest_field( @@ -4061,8 +4060,7 @@ class BatchNodeDisableSchedulingContent(_model_base.Model): :ivar node_disable_scheduling_option: What to do with currently running Tasks when disabling Task scheduling on the Compute Node. The default value is requeue. Known values are: "requeue", "terminate", and "taskcompletion". - :vartype node_disable_scheduling_option: str or - ~azure.batch.models.BatchNodeDisableSchedulingOption + :vartype node_disable_scheduling_option: str or ~client.models.BatchNodeDisableSchedulingOption """ node_disable_scheduling_option: Optional[Union[str, "_models.BatchNodeDisableSchedulingOption"]] = rest_field( @@ -4094,7 +4092,7 @@ class BatchNodeEndpointConfiguration(_model_base.Model): :ivar inbound_endpoints: The list of inbound endpoints that are accessible on the Compute Node. Required. 
- :vartype inbound_endpoints: list[~azure.batch.models.InboundEndpoint] + :vartype inbound_endpoints: list[~client.models.InboundEndpoint] """ inbound_endpoints: List["_models.InboundEndpoint"] = rest_field( @@ -4130,7 +4128,7 @@ class BatchNodeError(_model_base.Model): in a user interface. :vartype message: str :ivar error_details: The list of additional error details related to the Compute Node error. - :vartype error_details: list[~azure.batch.models.NameValuePair] + :vartype error_details: list[~client.models.NameValuePair] """ code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -4174,7 +4172,7 @@ class BatchNodeFile(_model_base.Model): :ivar is_directory: Whether the object represents a directory. :vartype is_directory: bool :ivar properties: The file properties. - :vartype properties: ~azure.batch.models.FileProperties + :vartype properties: ~client.models.FileProperties """ name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -4311,7 +4309,7 @@ class BatchNodePlacementConfiguration(_model_base.Model): :ivar policy: Node placement Policy type on Batch Pools. Allocation policy used by Batch Service to provision the nodes. If not specified, Batch will use the regional policy. Known values are: "regional" and "zonal". - :vartype policy: str or ~azure.batch.models.BatchNodePlacementPolicyType + :vartype policy: str or ~client.models.BatchNodePlacementPolicyType """ policy: Optional[Union[str, "_models.BatchNodePlacementPolicyType"]] = rest_field( @@ -4345,7 +4343,7 @@ class BatchNodeRebootContent(_model_base.Model): :ivar node_reboot_option: When to reboot the Compute Node and what to do with currently running Tasks. The default value is requeue. Known values are: "requeue", "terminate", "taskcompletion", and "retaineddata". 
- :vartype node_reboot_option: str or ~azure.batch.models.BatchNodeRebootOption + :vartype node_reboot_option: str or ~client.models.BatchNodeRebootOption """ node_reboot_option: Optional[Union[str, "_models.BatchNodeRebootOption"]] = rest_field( @@ -4379,7 +4377,7 @@ class BatchNodeReimageContent(_model_base.Model): :ivar node_reimage_option: When to reimage the Compute Node and what to do with currently running Tasks. The default value is requeue. Known values are: "requeue", "terminate", "taskcompletion", and "retaineddata". - :vartype node_reimage_option: str or ~azure.batch.models.BatchNodeReimageOption + :vartype node_reimage_option: str or ~client.models.BatchNodeReimageOption """ node_reimage_option: Optional[Union[str, "_models.BatchNodeReimageOption"]] = rest_field( @@ -4459,7 +4457,7 @@ class BatchNodeRemoveContent(_model_base.Model): :ivar node_deallocation_option: Determines what to do with a Compute Node and its running task(s) after it has been selected for deallocation. The default value is requeue. Known values are: "requeue", "terminate", "taskcompletion", and "retaineddata". - :vartype node_deallocation_option: str or ~azure.batch.models.BatchNodeDeallocationOption + :vartype node_deallocation_option: str or ~client.models.BatchNodeDeallocationOption """ node_list: List[str] = rest_field(name="nodeList", visibility=["read", "create", "update", "delete", "query"]) @@ -4628,9 +4626,9 @@ class BatchNodeVMExtension(_model_base.Model): :ivar provisioning_state: The provisioning state of the virtual machine extension. :vartype provisioning_state: str :ivar vm_extension: The virtual machine extension. - :vartype vm_extension: ~azure.batch.models.VMExtension + :vartype vm_extension: ~client.models.VMExtension :ivar instance_view: The vm extension instance view. 
- :vartype instance_view: ~azure.batch.models.VMExtensionInstanceView + :vartype instance_view: ~client.models.VMExtensionInstanceView """ provisioning_state: Optional[str] = rest_field( @@ -4691,12 +4689,12 @@ class BatchPool(_model_base.Model): :ivar creation_time: The creation time of the Pool. :vartype creation_time: ~datetime.datetime :ivar state: The current state of the Pool. Known values are: "active" and "deleting". - :vartype state: str or ~azure.batch.models.BatchPoolState + :vartype state: str or ~client.models.BatchPoolState :ivar state_transition_time: The time at which the Pool entered its current state. :vartype state_transition_time: ~datetime.datetime :ivar allocation_state: Whether the Pool is resizing. Known values are: "steady", "resizing", and "stopping". - :vartype allocation_state: str or ~azure.batch.models.AllocationState + :vartype allocation_state: str or ~client.models.AllocationState :ivar allocation_state_transition_time: The time at which the Pool entered its current allocation state. :vartype allocation_state_transition_time: ~datetime.datetime @@ -4709,7 +4707,7 @@ class BatchPool(_model_base.Model): :vartype vm_size: str :ivar virtual_machine_configuration: The virtual machine configuration for the Pool. This property must be specified. - :vartype virtual_machine_configuration: ~azure.batch.models.VirtualMachineConfiguration + :vartype virtual_machine_configuration: ~client.models.VirtualMachineConfiguration :ivar resize_timeout: The timeout for allocation of Compute Nodes to the Pool. This is the timeout for the most recent resize operation. (The initial sizing when the Pool is created counts as a resize.) The default value is 15 minutes. @@ -4717,7 +4715,7 @@ class BatchPool(_model_base.Model): :ivar resize_errors: A list of errors encountered while performing the last resize on the Pool. This property is set only if one or more errors occurred during the last Pool resize, and only when the Pool allocationState is Steady. 
- :vartype resize_errors: list[~azure.batch.models.ResizeError] + :vartype resize_errors: list[~client.models.ResizeError] :ivar resource_tags: The user-specified tags associated with the pool. The user-defined tags to be associated with the Azure Batch Pool. When specified, these tags are propagated to the backing Azure resources associated with the pool. This property can only be specified when the @@ -4747,16 +4745,16 @@ class BatchPool(_model_base.Model): :vartype auto_scale_evaluation_interval: ~datetime.timedelta :ivar auto_scale_run: The results and errors from the last execution of the autoscale formula. This property is set only if the Pool automatically scales, i.e. enableAutoScale is true. - :vartype auto_scale_run: ~azure.batch.models.AutoScaleRun + :vartype auto_scale_run: ~client.models.AutoScaleRun :ivar enable_inter_node_communication: Whether the Pool permits direct communication between Compute Nodes. This imposes restrictions on which Compute Nodes can be assigned to the Pool. Specifying this value can reduce the chance of the requested number of Compute Nodes to be allocated in the Pool. :vartype enable_inter_node_communication: bool :ivar network_configuration: The network configuration for the Pool. - :vartype network_configuration: ~azure.batch.models.NetworkConfiguration + :vartype network_configuration: ~client.models.NetworkConfiguration :ivar start_task: A Task specified to run on each Compute Node as it joins the Pool. - :vartype start_task: ~azure.batch.models.BatchStartTask + :vartype start_task: ~client.models.BatchStartTask :ivar certificate_references: For Windows Nodes, the Batch service installs the Certificates to the specified Certificate store and location. For Linux Compute Nodes, the Certificates are stored in a directory inside the Task working @@ -4767,48 +4765,47 @@ class BatchPool(_model_base.Model): Warning: This property is deprecated and will be removed after February, 2024. 
Please use the `Azure KeyVault Extension `_ instead. - :vartype certificate_references: list[~azure.batch.models.BatchCertificateReference] + :vartype certificate_references: list[~client.models.BatchCertificateReference] :ivar application_package_references: The list of Packages to be installed on each Compute Node in the Pool. Changes to Package references affect all new Nodes joining the Pool, but do not affect Compute Nodes that are already in the Pool until they are rebooted or reimaged. There is a maximum of 10 Package references on any given Pool. - :vartype application_package_references: - list[~azure.batch.models.BatchApplicationPackageReference] + :vartype application_package_references: list[~client.models.BatchApplicationPackageReference] :ivar task_slots_per_node: The number of task slots that can be used to run concurrent tasks on a single compute node in the pool. The default value is 1. The maximum value is the smaller of 4 times the number of cores of the vmSize of the pool or 256. :vartype task_slots_per_node: int :ivar task_scheduling_policy: How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread. - :vartype task_scheduling_policy: ~azure.batch.models.BatchTaskSchedulingPolicy + :vartype task_scheduling_policy: ~client.models.BatchTaskSchedulingPolicy :ivar user_accounts: The list of user Accounts to be created on each Compute Node in the Pool. - :vartype user_accounts: list[~azure.batch.models.UserAccount] + :vartype user_accounts: list[~client.models.UserAccount] :ivar metadata: A list of name-value pairs associated with the Pool as metadata. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] :ivar stats: Utilization and resource usage statistics for the entire lifetime of the Pool. This property is populated only if the BatchPool was retrieved with an expand clause including the 'stats' attribute; otherwise it is null. 
The statistics may not be immediately available. The Batch service performs periodic roll-up of statistics. The typical delay is about 30 minutes. - :vartype stats: ~azure.batch.models.BatchPoolStatistics + :vartype stats: ~client.models.BatchPoolStatistics :ivar mount_configuration: A list of file systems to mount on each node in the pool. This supports Azure Files, NFS, CIFS/SMB, and Blobfuse. - :vartype mount_configuration: list[~azure.batch.models.MountConfiguration] + :vartype mount_configuration: list[~client.models.MountConfiguration] :ivar identity: The identity of the Batch pool, if configured. The list of user identities associated with the Batch pool. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. - :vartype identity: ~azure.batch.models.BatchPoolIdentity + :vartype identity: ~client.models.BatchPoolIdentity :ivar target_node_communication_mode: The desired node communication mode for the pool. If omitted, the default value is Default. Known values are: "default", "classic", and "simplified". - :vartype target_node_communication_mode: str or ~azure.batch.models.BatchNodeCommunicationMode + :vartype target_node_communication_mode: str or ~client.models.BatchNodeCommunicationMode :ivar current_node_communication_mode: The current state of the pool communication mode. Known values are: "default", "classic", and "simplified". - :vartype current_node_communication_mode: str or ~azure.batch.models.BatchNodeCommunicationMode + :vartype current_node_communication_mode: str or ~client.models.BatchNodeCommunicationMode :ivar upgrade_policy: The upgrade policy for the Pool. Describes an upgrade policy - automatic, manual, or rolling. 
- :vartype upgrade_policy: ~azure.batch.models.UpgradePolicy + :vartype upgrade_policy: ~client.models.UpgradePolicy """ id: Optional[str] = rest_field(visibility=["read"]) @@ -5013,7 +5010,7 @@ class BatchPoolCreateContent(_model_base.Model): :vartype vm_size: str :ivar virtual_machine_configuration: The virtual machine configuration for the Pool. This property must be specified. - :vartype virtual_machine_configuration: ~azure.batch.models.VirtualMachineConfiguration + :vartype virtual_machine_configuration: ~client.models.VirtualMachineConfiguration :ivar resize_timeout: The timeout for allocation of Compute Nodes to the Pool. This timeout applies only to manual scaling; it has no effect when enableAutoScale is set to true. The default value is 15 minutes. The minimum value is 5 minutes. If you specify a value less than 5 @@ -5060,10 +5057,10 @@ class BatchPoolCreateContent(_model_base.Model): reaching its desired size. The default value is false. :vartype enable_inter_node_communication: bool :ivar network_configuration: The network configuration for the Pool. - :vartype network_configuration: ~azure.batch.models.NetworkConfiguration + :vartype network_configuration: ~client.models.NetworkConfiguration :ivar start_task: A Task specified to run on each Compute Node as it joins the Pool. The Task runs when the Compute Node is added to the Pool or when the Compute Node is restarted. - :vartype start_task: ~azure.batch.models.BatchStartTask + :vartype start_task: ~client.models.BatchStartTask :ivar certificate_references: For Windows Nodes, the Batch service installs the Certificates to the specified Certificate store and location. For Linux Compute Nodes, the Certificates are stored in a directory inside the Task working @@ -5074,37 +5071,36 @@ class BatchPoolCreateContent(_model_base.Model): Warning: This property is deprecated and will be removed after February, 2024. Please use the `Azure KeyVault Extension `_ instead. 
- :vartype certificate_references: list[~azure.batch.models.BatchCertificateReference] + :vartype certificate_references: list[~client.models.BatchCertificateReference] :ivar application_package_references: The list of Packages to be installed on each Compute Node in the Pool. When creating a pool, the package's application ID must be fully qualified (/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationName}). Changes to Package references affect all new Nodes joining the Pool, but do not affect Compute Nodes that are already in the Pool until they are rebooted or reimaged. There is a maximum of 10 Package references on any given Pool. - :vartype application_package_references: - list[~azure.batch.models.BatchApplicationPackageReference] + :vartype application_package_references: list[~client.models.BatchApplicationPackageReference] :ivar task_slots_per_node: The number of task slots that can be used to run concurrent tasks on a single compute node in the pool. The default value is 1. The maximum value is the smaller of 4 times the number of cores of the vmSize of the pool or 256. :vartype task_slots_per_node: int :ivar task_scheduling_policy: How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread. - :vartype task_scheduling_policy: ~azure.batch.models.BatchTaskSchedulingPolicy + :vartype task_scheduling_policy: ~client.models.BatchTaskSchedulingPolicy :ivar user_accounts: The list of user Accounts to be created on each Compute Node in the Pool. - :vartype user_accounts: list[~azure.batch.models.UserAccount] + :vartype user_accounts: list[~client.models.UserAccount] :ivar metadata: A list of name-value pairs associated with the Pool as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. 
- :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] :ivar mount_configuration: Mount storage using specified file system for the entire lifetime of the pool. Mount the storage using Azure fileshare, NFS, CIFS or Blobfuse based file system. - :vartype mount_configuration: list[~azure.batch.models.MountConfiguration] + :vartype mount_configuration: list[~client.models.MountConfiguration] :ivar target_node_communication_mode: The desired node communication mode for the pool. If omitted, the default value is Default. Known values are: "default", "classic", and "simplified". - :vartype target_node_communication_mode: str or ~azure.batch.models.BatchNodeCommunicationMode + :vartype target_node_communication_mode: str or ~client.models.BatchNodeCommunicationMode :ivar upgrade_policy: The upgrade policy for the Pool. Describes an upgrade policy - automatic, manual, or rolling. - :vartype upgrade_policy: ~azure.batch.models.UpgradePolicy + :vartype upgrade_policy: ~client.models.UpgradePolicy """ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -5364,7 +5360,7 @@ class BatchPoolEndpointConfiguration(_model_base.Model): Pool is 5. If the maximum number of inbound NAT Pools is exceeded the request fails with HTTP status code 400. This cannot be specified if the IPAddressProvisioningType is NoPublicIPAddresses. Required. - :vartype inbound_nat_pools: list[~azure.batch.models.InboundNatPool] + :vartype inbound_nat_pools: list[~client.models.InboundNatPool] """ inbound_nat_pools: List["_models.InboundNatPool"] = rest_field( @@ -5441,11 +5437,11 @@ class BatchPoolIdentity(_model_base.Model): resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. Required. Known values are: "UserAssigned" and "None". 
- :vartype type: str or ~azure.batch.models.BatchPoolIdentityType + :vartype type: str or ~client.models.BatchPoolIdentityType :ivar user_assigned_identities: The list of user identities associated with the Batch account. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'. - :vartype user_assigned_identities: list[~azure.batch.models.UserAssignedIdentity] + :vartype user_assigned_identities: list[~client.models.UserAssignedIdentity] """ type: Union[str, "_models.BatchPoolIdentityType"] = rest_field( @@ -5498,7 +5494,7 @@ class BatchPoolInfo(_model_base.Model): keepAlive is specified, deletion) of the auto Pool. Any user actions that affect the lifetime of the auto Pool while the Job is active will result in unexpected behavior. You must specify either the Pool ID or the auto Pool specification, but not both. - :vartype auto_pool_specification: ~azure.batch.models.BatchAutoPoolSpecification + :vartype auto_pool_specification: ~client.models.BatchAutoPoolSpecification """ pool_id: Optional[str] = rest_field(name="poolId", visibility=["read", "create", "update", "delete", "query"]) @@ -5544,9 +5540,9 @@ class BatchPoolNodeCounts(_model_base.Model): :ivar pool_id: The ID of the Pool. Required. :vartype pool_id: str :ivar dedicated: The number of dedicated Compute Nodes in each state. - :vartype dedicated: ~azure.batch.models.BatchNodeCounts + :vartype dedicated: ~client.models.BatchNodeCounts :ivar low_priority: The number of Spot/Low-priority Compute Nodes in each state. 
- :vartype low_priority: ~azure.batch.models.BatchNodeCounts + :vartype low_priority: ~client.models.BatchNodeCounts """ pool_id: str = rest_field(name="poolId", visibility=["read", "create", "update", "delete", "query"]) @@ -5587,7 +5583,7 @@ class BatchPoolReplaceContent(_model_base.Model): the Compute Node is added to the Pool or when the Compute Node is restarted. If this element is present, it overwrites any existing StartTask. If omitted, any existing StartTask is removed from the Pool. - :vartype start_task: ~azure.batch.models.BatchStartTask + :vartype start_task: ~client.models.BatchStartTask :ivar certificate_references: This list replaces any existing Certificate references configured on the Pool. If you specify an empty collection, any existing Certificate references are removed from the @@ -5603,7 +5599,7 @@ class BatchPoolReplaceContent(_model_base.Model): `Azure KeyVault Extension `_ instead. Required. - :vartype certificate_references: list[~azure.batch.models.BatchCertificateReference] + :vartype certificate_references: list[~client.models.BatchCertificateReference] :ivar application_package_references: The list of Application Packages to be installed on each Compute Node in the Pool. The list replaces any existing Application Package references on the Pool. Changes to Application Package references affect all new Compute Nodes joining the Pool, @@ -5612,16 +5608,15 @@ class BatchPoolReplaceContent(_model_base.Model): omitted, or if you specify an empty collection, any existing Application Packages references are removed from the Pool. A maximum of 10 references may be specified on a given Pool. Required. - :vartype application_package_references: - list[~azure.batch.models.BatchApplicationPackageReference] + :vartype application_package_references: list[~client.models.BatchApplicationPackageReference] :ivar metadata: A list of name-value pairs associated with the Pool as metadata. This list replaces any existing metadata configured on the Pool. 
If omitted, or if you specify an empty collection, any existing metadata is removed from the Pool. Required. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] :ivar target_node_communication_mode: The desired node communication mode for the pool. This setting replaces any existing targetNodeCommunication setting on the Pool. If omitted, the existing setting is default. Known values are: "default", "classic", and "simplified". - :vartype target_node_communication_mode: str or ~azure.batch.models.BatchNodeCommunicationMode + :vartype target_node_communication_mode: str or ~client.models.BatchNodeCommunicationMode """ start_task: Optional["_models.BatchStartTask"] = rest_field( @@ -5706,7 +5701,7 @@ class BatchPoolResizeContent(_model_base.Model): :ivar node_deallocation_option: Determines what to do with a Compute Node and its running task(s) if the Pool size is decreasing. The default value is requeue. Known values are: "requeue", "terminate", "taskcompletion", and "retaineddata". - :vartype node_deallocation_option: str or ~azure.batch.models.BatchNodeDeallocationOption + :vartype node_deallocation_option: str or ~client.models.BatchNodeDeallocationOption """ target_dedicated_nodes: Optional[int] = rest_field( @@ -5885,14 +5880,14 @@ class BatchPoolSpecification(_model_base.Model): :vartype vm_size: str :ivar virtual_machine_configuration: The virtual machine configuration for the Pool. This property must be specified. - :vartype virtual_machine_configuration: ~azure.batch.models.VirtualMachineConfiguration + :vartype virtual_machine_configuration: ~client.models.VirtualMachineConfiguration :ivar task_slots_per_node: The number of task slots that can be used to run concurrent tasks on a single compute node in the pool. The default value is 1. The maximum value is the smaller of 4 times the number of cores of the vmSize of the pool or 256. 
:vartype task_slots_per_node: int :ivar task_scheduling_policy: How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread. - :vartype task_scheduling_policy: ~azure.batch.models.BatchTaskSchedulingPolicy + :vartype task_scheduling_policy: ~client.models.BatchTaskSchedulingPolicy :ivar resize_timeout: The timeout for allocation of Compute Nodes to the Pool. This timeout applies only to manual scaling; it has no effect when enableAutoScale is set to true. The default value is 15 minutes. The minimum value is 5 minutes. If you specify a value less than 5 @@ -5937,10 +5932,10 @@ class BatchPoolSpecification(_model_base.Model): reaching its desired size. The default value is false. :vartype enable_inter_node_communication: bool :ivar network_configuration: The network configuration for the Pool. - :vartype network_configuration: ~azure.batch.models.NetworkConfiguration + :vartype network_configuration: ~client.models.NetworkConfiguration :ivar start_task: A Task to run on each Compute Node as it joins the Pool. The Task runs when the Compute Node is added to the Pool or when the Compute Node is restarted. - :vartype start_task: ~azure.batch.models.BatchStartTask + :vartype start_task: ~client.models.BatchStartTask :ivar certificate_references: For Windows Nodes, the Batch service installs the Certificates to the specified Certificate store and location. For Linux Compute Nodes, the Certificates are stored in a directory inside the Task working directory and an environment variable @@ -5950,30 +5945,29 @@ class BatchPoolSpecification(_model_base.Model): Warning: This property is deprecated and will be removed after February, 2024. Please use the `Azure KeyVault Extension `_ instead. 
- :vartype certificate_references: list[~azure.batch.models.BatchCertificateReference] + :vartype certificate_references: list[~client.models.BatchCertificateReference] :ivar application_package_references: The list of Packages to be installed on each Compute Node in the Pool. When creating a pool, the package's application ID must be fully qualified (/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Batch/batchAccounts/{accountName}/applications/{applicationName}). Changes to Package references affect all new Nodes joining the Pool, but do not affect Compute Nodes that are already in the Pool until they are rebooted or reimaged. There is a maximum of 10 Package references on any given Pool. - :vartype application_package_references: - list[~azure.batch.models.BatchApplicationPackageReference] + :vartype application_package_references: list[~client.models.BatchApplicationPackageReference] :ivar user_accounts: The list of user Accounts to be created on each Compute Node in the Pool. - :vartype user_accounts: list[~azure.batch.models.UserAccount] + :vartype user_accounts: list[~client.models.UserAccount] :ivar metadata: A list of name-value pairs associated with the Pool as metadata. The Batch service does not assign any meaning to metadata; it is solely for the use of user code. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] :ivar mount_configuration: A list of file systems to mount on each node in the pool. This supports Azure Files, NFS, CIFS/SMB, and Blobfuse. - :vartype mount_configuration: list[~azure.batch.models.MountConfiguration] + :vartype mount_configuration: list[~client.models.MountConfiguration] :ivar target_node_communication_mode: The desired node communication mode for the pool. If omitted, the default value is Default. Known values are: "default", "classic", and "simplified". 
- :vartype target_node_communication_mode: str or ~azure.batch.models.BatchNodeCommunicationMode + :vartype target_node_communication_mode: str or ~client.models.BatchNodeCommunicationMode :ivar upgrade_policy: The upgrade policy for the Pool. Describes an upgrade policy - automatic, manual, or rolling. - :vartype upgrade_policy: ~azure.batch.models.UpgradePolicy + :vartype upgrade_policy: ~client.models.UpgradePolicy """ display_name: Optional[str] = rest_field( @@ -6162,9 +6156,9 @@ class BatchPoolStatistics(_model_base.Model): limited to the range between startTime and lastUpdateTime. Required. :vartype last_update_time: ~datetime.datetime :ivar usage_stats: Statistics related to Pool usage, such as the amount of core-time used. - :vartype usage_stats: ~azure.batch.models.BatchPoolUsageStatistics + :vartype usage_stats: ~client.models.BatchPoolUsageStatistics :ivar resource_stats: Statistics related to resource consumption by Compute Nodes in the Pool. - :vartype resource_stats: ~azure.batch.models.BatchPoolResourceStatistics + :vartype resource_stats: ~client.models.BatchPoolResourceStatistics """ url: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -6232,7 +6226,7 @@ class BatchPoolUpdateContent(_model_base.Model): the Compute Node is added to the Pool or when the Compute Node is restarted. If this element is present, it overwrites any existing StartTask. If omitted, any existing StartTask is left unchanged. - :vartype start_task: ~azure.batch.models.BatchStartTask + :vartype start_task: ~client.models.BatchStartTask :ivar certificate_references: If this element is present, it replaces any existing Certificate references configured on the Pool. If omitted, any existing Certificate references are left unchanged. @@ -6246,28 +6240,27 @@ class BatchPoolUpdateContent(_model_base.Model): Warning: This property is deprecated and will be removed after February, 2024. Please use the `Azure KeyVault Extension `_ instead. 
- :vartype certificate_references: list[~azure.batch.models.BatchCertificateReference] + :vartype certificate_references: list[~client.models.BatchCertificateReference] :ivar application_package_references: A list of Packages to be installed on each Compute Node in the Pool. Changes to Package references affect all new Nodes joining the Pool, but do not affect Compute Nodes that are already in the Pool until they are rebooted or reimaged. If this element is present, it replaces any existing Package references. If you specify an empty collection, then all Package references are removed from the Pool. If omitted, any existing Package references are left unchanged. - :vartype application_package_references: - list[~azure.batch.models.BatchApplicationPackageReference] + :vartype application_package_references: list[~client.models.BatchApplicationPackageReference] :ivar metadata: A list of name-value pairs associated with the Pool as metadata. If this element is present, it replaces any existing metadata configured on the Pool. If you specify an empty collection, any metadata is removed from the Pool. If omitted, any existing metadata is left unchanged. - :vartype metadata: list[~azure.batch.models.MetadataItem] + :vartype metadata: list[~client.models.MetadataItem] :ivar virtual_machine_configuration: The virtual machine configuration for the Pool. This property must be specified.

This field can be updated only when the pool is empty. - :vartype virtual_machine_configuration: ~azure.batch.models.VirtualMachineConfiguration + :vartype virtual_machine_configuration: ~client.models.VirtualMachineConfiguration :ivar target_node_communication_mode: The desired node communication mode for the pool. If this element is present, it replaces the existing targetNodeCommunicationMode configured on the Pool. If omitted, any existing metadata is left unchanged. Known values are: "default", "classic", and "simplified". - :vartype target_node_communication_mode: str or ~azure.batch.models.BatchNodeCommunicationMode + :vartype target_node_communication_mode: str or ~client.models.BatchNodeCommunicationMode :ivar task_slots_per_node: The number of task slots that can be used to run concurrent tasks on a single compute node in the pool. The default value is 1. The maximum value is the smaller of 4 times the number of cores of the vmSize of the pool or 256.

This field can be @@ -6276,10 +6269,10 @@ class BatchPoolUpdateContent(_model_base.Model): :ivar task_scheduling_policy: How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread.

This field can be updated only when the pool is empty. - :vartype task_scheduling_policy: ~azure.batch.models.BatchTaskSchedulingPolicy + :vartype task_scheduling_policy: ~client.models.BatchTaskSchedulingPolicy :ivar network_configuration: The network configuration for the Pool. This field can be updated only when the pool is empty. - :vartype network_configuration: ~azure.batch.models.NetworkConfiguration + :vartype network_configuration: ~client.models.NetworkConfiguration :ivar resource_tags: The user-specified tags associated with the pool. The user-defined tags to be associated with the Azure Batch Pool. When specified, these tags are propagated to the backing Azure resources associated with the pool. This property can only be specified when the @@ -6288,14 +6281,14 @@ class BatchPoolUpdateContent(_model_base.Model): :vartype resource_tags: dict[str, str] :ivar user_accounts: The list of user Accounts to be created on each Compute Node in the Pool. This field can be updated only when the pool is empty. - :vartype user_accounts: list[~azure.batch.models.UserAccount] + :vartype user_accounts: list[~client.models.UserAccount] :ivar mount_configuration: Mount storage using specified file system for the entire lifetime of the pool. Mount the storage using Azure fileshare, NFS, CIFS or Blobfuse based file system.

This field can be updated only when the pool is empty. - :vartype mount_configuration: list[~azure.batch.models.MountConfiguration] + :vartype mount_configuration: list[~client.models.MountConfiguration] :ivar upgrade_policy: The upgrade policy for the Pool. Describes an upgrade policy - automatic, manual, or rolling.

This field can be updated only when the pool is empty. - :vartype upgrade_policy: ~azure.batch.models.UpgradePolicy + :vartype upgrade_policy: ~client.models.UpgradePolicy """ display_name: Optional[str] = rest_field( @@ -6581,19 +6574,19 @@ class BatchStartTask(_model_base.Model): container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. - :vartype container_settings: ~azure.batch.models.BatchTaskContainerSettings + :vartype container_settings: ~client.models.BatchTaskContainerSettings :ivar resource_files: A list of files that the Batch service will download to the Compute Node before running the command line. There is a maximum size for the list of resource files. When the max size is exceeded, the request will fail and the response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. Files listed under this element are located in the Task's working directory. - :vartype resource_files: list[~azure.batch.models.ResourceFile] + :vartype resource_files: list[~client.models.ResourceFile] :ivar environment_settings: A list of environment variable settings for the StartTask. - :vartype environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype environment_settings: list[~client.models.EnvironmentSetting] :ivar user_identity: The user identity under which the StartTask runs. If omitted, the Task runs as a non-administrative user unique to the Task. - :vartype user_identity: ~azure.batch.models.UserIdentity + :vartype user_identity: ~client.models.UserIdentity :ivar max_task_retry_count: The maximum number of times the Task may be retried. The Batch service retries a Task if its exit code is nonzero. Note that this value specifically controls the number of retries. 
The Batch service will try the Task once, and may then retry up to this @@ -6705,7 +6698,7 @@ class BatchStartTaskInfo(_model_base.Model): :ivar state: The state of the StartTask on the Compute Node. Required. Known values are: "running" and "completed". - :vartype state: str or ~azure.batch.models.BatchStartTaskState + :vartype state: str or ~client.models.BatchStartTaskState :ivar start_time: The time at which the StartTask started running. This value is reset every time the Task is restarted or retried (that is, this is the most recent time at which the StartTask started running). Required. @@ -6724,10 +6717,10 @@ class BatchStartTaskInfo(_model_base.Model): :vartype exit_code: int :ivar container_info: Information about the container under which the Task is executing. This property is set only if the Task runs in a container context. - :vartype container_info: ~azure.batch.models.BatchTaskContainerExecutionInfo + :vartype container_info: ~client.models.BatchTaskContainerExecutionInfo :ivar failure_info: Information describing the Task failure, if any. This property is set only if the Task is in the completed state and encountered a failure. - :vartype failure_info: ~azure.batch.models.BatchTaskFailureInfo + :vartype failure_info: ~client.models.BatchTaskFailureInfo :ivar retry_count: The number of times the Task has been retried by the Batch service. Task application failures (non-zero exit code) are retried, pre-processing errors (the Task could not be run) and file upload errors are not retried. The Batch service will retry the Task up to @@ -6742,7 +6735,7 @@ class BatchStartTaskInfo(_model_base.Model): :ivar result: The result of the Task execution. If the value is 'failed', then the details of the failure can be found in the failureInfo property. Known values are: "success" and "failure". 
- :vartype result: str or ~azure.batch.models.BatchTaskExecutionResult + :vartype result: str or ~client.models.BatchTaskExecutionResult """ state: Union[str, "_models.BatchStartTaskState"] = rest_field( @@ -6831,7 +6824,7 @@ class BatchSubtask(_model_base.Model): :ivar id: The ID of the subtask. :vartype id: int :ivar node_info: Information about the Compute Node on which the subtask ran. - :vartype node_info: ~azure.batch.models.BatchNodeInfo + :vartype node_info: ~client.models.BatchNodeInfo :ivar start_time: The time at which the subtask started running. If the subtask has been restarted or retried, this is the most recent time at which the subtask started running. :vartype start_time: ~datetime.datetime @@ -6848,26 +6841,26 @@ class BatchSubtask(_model_base.Model): :vartype exit_code: int :ivar container_info: Information about the container under which the Task is executing. This property is set only if the Task runs in a container context. - :vartype container_info: ~azure.batch.models.BatchTaskContainerExecutionInfo + :vartype container_info: ~client.models.BatchTaskContainerExecutionInfo :ivar failure_info: Information describing the Task failure, if any. This property is set only if the Task is in the completed state and encountered a failure. - :vartype failure_info: ~azure.batch.models.BatchTaskFailureInfo + :vartype failure_info: ~client.models.BatchTaskFailureInfo :ivar state: The current state of the subtask. Known values are: "preparing", "running", and "completed". - :vartype state: str or ~azure.batch.models.BatchSubtaskState + :vartype state: str or ~client.models.BatchSubtaskState :ivar state_transition_time: The time at which the subtask entered its current state. :vartype state_transition_time: ~datetime.datetime :ivar previous_state: The previous state of the subtask. This property is not set if the subtask is in its initial running state. Known values are: "preparing", "running", and "completed". 
- :vartype previous_state: str or ~azure.batch.models.BatchSubtaskState + :vartype previous_state: str or ~client.models.BatchSubtaskState :ivar previous_state_transition_time: The time at which the subtask entered its previous state. This property is not set if the subtask is in its initial running state. :vartype previous_state_transition_time: ~datetime.datetime :ivar result: The result of the Task execution. If the value is 'failed', then the details of the failure can be found in the failureInfo property. Known values are: "success" and "failure". - :vartype result: str or ~azure.batch.models.BatchTaskExecutionResult + :vartype result: str or ~client.models.BatchTaskExecutionResult """ id: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -6966,10 +6959,10 @@ class BatchSupportedImage(_model_base.Model): :vartype node_agent_sku_id: str :ivar image_reference: The reference to the Azure Virtual Machine's Marketplace Image. Required. - :vartype image_reference: ~azure.batch.models.ImageReference + :vartype image_reference: ~client.models.ImageReference :ivar os_type: The type of operating system (e.g. Windows or Linux) of the Image. Required. Known values are: "linux" and "windows". - :vartype os_type: str or ~azure.batch.models.OSType + :vartype os_type: str or ~client.models.OSType :ivar capabilities: The capabilities or features which the Image supports. Not every capability of the Image is listed. Capabilities in this list are considered of special interest and are generally related to integration with other features in the Azure Batch service. @@ -6980,7 +6973,7 @@ class BatchSupportedImage(_model_base.Model): :ivar verification_type: Whether the Azure Batch service actively verifies that the Image is compatible with the associated Compute Node agent SKU. Required. Known values are: "verified" and "unverified". 
- :vartype verification_type: str or ~azure.batch.models.ImageVerificationType + :vartype verification_type: str or ~client.models.ImageVerificationType """ node_agent_sku_id: str = rest_field( @@ -7063,16 +7056,16 @@ class BatchTask(_model_base.Model): :ivar creation_time: The creation time of the Task. :vartype creation_time: ~datetime.datetime :ivar exit_conditions: How the Batch service should respond when the Task completes. - :vartype exit_conditions: ~azure.batch.models.ExitConditions + :vartype exit_conditions: ~client.models.ExitConditions :ivar state: The current state of the Task. Known values are: "active", "preparing", "running", and "completed". - :vartype state: str or ~azure.batch.models.BatchTaskState + :vartype state: str or ~client.models.BatchTaskState :ivar state_transition_time: The time at which the Task entered its current state. :vartype state_transition_time: ~datetime.datetime :ivar previous_state: The previous state of the Task. This property is not set if the Task is in its initial Active state. Known values are: "active", "preparing", "running", and "completed". - :vartype previous_state: str or ~azure.batch.models.BatchTaskState + :vartype previous_state: str or ~client.models.BatchTaskState :ivar previous_state_transition_time: The time at which the Task entered its previous state. This property is not set if the Task is in its initial Active state. :vartype previous_state_transition_time: ~datetime.datetime @@ -7096,7 +7089,7 @@ class BatchTask(_model_base.Model): container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. - :vartype container_settings: ~azure.batch.models.BatchTaskContainerSettings + :vartype container_settings: ~client.models.BatchTaskContainerSettings :ivar resource_files: A list of files that the Batch service will download to the Compute Node before running the command line. 
For multi-instance Tasks, the resource files will only be downloaded to the Compute Node on which the primary Task is executed. There is a maximum size @@ -7104,46 +7097,45 @@ class BatchTask(_model_base.Model): response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. - :vartype resource_files: list[~azure.batch.models.ResourceFile] + :vartype resource_files: list[~client.models.ResourceFile] :ivar output_files: A list of files that the Batch service will upload from the Compute Node after running the command line. For multi-instance Tasks, the files will only be uploaded from the Compute Node on which the primary Task is executed. - :vartype output_files: list[~azure.batch.models.OutputFile] + :vartype output_files: list[~client.models.OutputFile] :ivar environment_settings: A list of environment variable settings for the Task. - :vartype environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype environment_settings: list[~client.models.EnvironmentSetting] :ivar affinity_info: A locality hint that can be used by the Batch service to select a Compute Node on which to start the new Task. - :vartype affinity_info: ~azure.batch.models.AffinityInfo + :vartype affinity_info: ~client.models.AffinityInfo :ivar constraints: The execution constraints that apply to this Task. - :vartype constraints: ~azure.batch.models.BatchTaskConstraints + :vartype constraints: ~client.models.BatchTaskConstraints :ivar required_slots: The number of scheduling slots that the Task requires to run. The default is 1. A Task can only be scheduled to run on a compute node if the node has enough free scheduling slots available. For multi-instance Tasks, this must be 1. :vartype required_slots: int :ivar user_identity: The user identity under which the Task runs. 
If omitted, the Task runs as a non-administrative user unique to the Task. - :vartype user_identity: ~azure.batch.models.UserIdentity + :vartype user_identity: ~client.models.UserIdentity :ivar execution_info: Information about the execution of the Task. - :vartype execution_info: ~azure.batch.models.BatchTaskExecutionInfo + :vartype execution_info: ~client.models.BatchTaskExecutionInfo :ivar node_info: Information about the Compute Node on which the Task ran. - :vartype node_info: ~azure.batch.models.BatchNodeInfo + :vartype node_info: ~client.models.BatchNodeInfo :ivar multi_instance_settings: An object that indicates that the Task is a multi-instance Task, and contains information about how to run the multi-instance Task. - :vartype multi_instance_settings: ~azure.batch.models.MultiInstanceSettings + :vartype multi_instance_settings: ~client.models.MultiInstanceSettings :ivar stats: Resource usage statistics for the Task. - :vartype stats: ~azure.batch.models.BatchTaskStatistics + :vartype stats: ~client.models.BatchTaskStatistics :ivar depends_on: The Tasks that this Task depends on. This Task will not be scheduled until all Tasks that it depends on have completed successfully. If any of those Tasks fail and exhaust their retry counts, this Task will never be scheduled. - :vartype depends_on: ~azure.batch.models.BatchTaskDependencies + :vartype depends_on: ~client.models.BatchTaskDependencies :ivar application_package_references: A list of Packages that the Batch service will deploy to the Compute Node before running the command line. Application packages are downloaded and deployed to a shared directory, not the Task working directory. Therefore, if a referenced package is already on the Node, and is up to date, then it is not re-downloaded; the existing copy on the Compute Node is used. If a referenced Package cannot be installed, for example because the package has been deleted or because download failed, the Task fails. 
- :vartype application_package_references: - list[~azure.batch.models.BatchApplicationPackageReference] + :vartype application_package_references: list[~client.models.BatchApplicationPackageReference] :ivar authentication_token_settings: The settings for an authentication token that the Task can use to perform Batch service operations. If this property is set, the Batch service provides the Task with an authentication token which can be used to authenticate Batch service @@ -7152,7 +7144,7 @@ class BatchTask(_model_base.Model): using the token depend on the settings. For example, a Task can request Job permissions in order to add other Tasks to the Job, or check the status of the Job or of other Tasks under the Job. - :vartype authentication_token_settings: ~azure.batch.models.AuthenticationTokenSettings + :vartype authentication_token_settings: ~client.models.AuthenticationTokenSettings """ id: Optional[str] = rest_field(visibility=["read"]) @@ -7301,7 +7293,7 @@ class BatchTaskAddCollectionResult(_model_base.Model): """The result of adding a collection of Tasks to a Job. :ivar value: The results of the add Task collection operation. - :vartype value: list[~azure.batch.models.BatchTaskAddResult] + :vartype value: list[~client.models.BatchTaskAddResult] """ value: Optional[List["_models.BatchTaskAddResult"]] = rest_field( @@ -7332,7 +7324,7 @@ class BatchTaskAddResult(_model_base.Model): :ivar status: The status of the add Task request. Required. Known values are: "success", "clienterror", and "servererror". - :vartype status: str or ~azure.batch.models.BatchTaskAddStatus + :vartype status: str or ~client.models.BatchTaskAddStatus :ivar task_id: The ID of the Task for which this is the result. Required. :vartype task_id: str :ivar e_tag: The ETag of the Task, if the Task was successfully added. You can use this to @@ -7345,7 +7337,7 @@ class BatchTaskAddResult(_model_base.Model): :ivar location: The URL of the Task, if the Task was successfully added. 
:vartype location: str :ivar error: The error encountered while attempting to add the Task. - :vartype error: ~azure.batch.models.BatchError + :vartype error: ~client.models.BatchError """ status: Union[str, "_models.BatchTaskAddStatus"] = rest_field( @@ -7519,16 +7511,16 @@ class BatchTaskContainerSettings(_model_base.Model): :vartype image_name: str :ivar registry: The private registry which contains the container Image. This setting can be omitted if was already provided at Pool creation. - :vartype registry: ~azure.batch.models.ContainerRegistryReference + :vartype registry: ~client.models.ContainerRegistryReference :ivar working_directory: The location of the container Task working directory. The default is 'taskWorkingDirectory'. Known values are: "taskWorkingDirectory" and "containerImageDefault". - :vartype working_directory: str or ~azure.batch.models.ContainerWorkingDirectory + :vartype working_directory: str or ~client.models.ContainerWorkingDirectory :ivar container_host_batch_bind_mounts: The paths you want to mounted to container task. If this array is null or be not present, container task will mount entire temporary disk drive in windows (or AZ_BATCH_NODE_ROOT_DIR in Linux). It won't' mount any data paths into container if this array is set as empty. :vartype container_host_batch_bind_mounts: - list[~azure.batch.models.ContainerHostBatchBindMountEntry] + list[~client.models.ContainerHostBatchBindMountEntry] """ container_run_options: Optional[str] = rest_field( @@ -7636,9 +7628,9 @@ class BatchTaskCountsResult(_model_base.Model): """The Task and TaskSlot counts for a Job. :ivar task_counts: The number of Tasks per state. Required. - :vartype task_counts: ~azure.batch.models.BatchTaskCounts + :vartype task_counts: ~client.models.BatchTaskCounts :ivar task_slot_counts: The number of TaskSlots required by Tasks per state. Required. 
- :vartype task_slot_counts: ~azure.batch.models.BatchTaskSlotCounts + :vartype task_slot_counts: ~client.models.BatchTaskSlotCounts """ task_counts: "_models.BatchTaskCounts" = rest_field( @@ -7681,7 +7673,7 @@ class BatchTaskCreateContent(_model_base.Model): contain any Unicode characters up to a maximum length of 1024. :vartype display_name: str :ivar exit_conditions: How the Batch service should respond when the Task completes. - :vartype exit_conditions: ~azure.batch.models.ExitConditions + :vartype exit_conditions: ~client.models.ExitConditions :ivar command_line: The command line of the Task. For multi-instance Tasks, the command line is executed as the primary Task, after the primary Task and all subtasks have finished executing the coordination command line. The command line does not run under a shell, and therefore @@ -7703,7 +7695,7 @@ class BatchTaskCreateContent(_model_base.Model): container. Files produced in the container outside of AZ_BATCH_NODE_ROOT_DIR might not be reflected to the host disk, meaning that Batch file APIs will not be able to access those files. - :vartype container_settings: ~azure.batch.models.BatchTaskContainerSettings + :vartype container_settings: ~client.models.BatchTaskContainerSettings :ivar resource_files: A list of files that the Batch service will download to the Compute Node before running the command line. For multi-instance Tasks, the resource files will only be downloaded to the Compute Node on which the primary Task is executed. There is a maximum size @@ -7711,44 +7703,43 @@ class BatchTaskCreateContent(_model_base.Model): response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. 
- :vartype resource_files: list[~azure.batch.models.ResourceFile] + :vartype resource_files: list[~client.models.ResourceFile] :ivar output_files: A list of files that the Batch service will upload from the Compute Node after running the command line. For multi-instance Tasks, the files will only be uploaded from the Compute Node on which the primary Task is executed. - :vartype output_files: list[~azure.batch.models.OutputFile] + :vartype output_files: list[~client.models.OutputFile] :ivar environment_settings: A list of environment variable settings for the Task. - :vartype environment_settings: list[~azure.batch.models.EnvironmentSetting] + :vartype environment_settings: list[~client.models.EnvironmentSetting] :ivar affinity_info: A locality hint that can be used by the Batch service to select a Compute Node on which to start the new Task. - :vartype affinity_info: ~azure.batch.models.AffinityInfo + :vartype affinity_info: ~client.models.AffinityInfo :ivar constraints: The execution constraints that apply to this Task. If you do not specify constraints, the maxTaskRetryCount is the maxTaskRetryCount specified for the Job, the maxWallClockTime is infinite, and the retentionTime is 7 days. - :vartype constraints: ~azure.batch.models.BatchTaskConstraints + :vartype constraints: ~client.models.BatchTaskConstraints :ivar required_slots: The number of scheduling slots that the Task required to run. The default is 1. A Task can only be scheduled to run on a compute node if the node has enough free scheduling slots available. For multi-instance Tasks, this must be 1. :vartype required_slots: int :ivar user_identity: The user identity under which the Task runs. If omitted, the Task runs as a non-administrative user unique to the Task. 
- :vartype user_identity: ~azure.batch.models.UserIdentity + :vartype user_identity: ~client.models.UserIdentity :ivar multi_instance_settings: An object that indicates that the Task is a multi-instance Task, and contains information about how to run the multi-instance Task. - :vartype multi_instance_settings: ~azure.batch.models.MultiInstanceSettings + :vartype multi_instance_settings: ~client.models.MultiInstanceSettings :ivar depends_on: The Tasks that this Task depends on. This Task will not be scheduled until all Tasks that it depends on have completed successfully. If any of those Tasks fail and exhaust their retry counts, this Task will never be scheduled. If the Job does not have usesTaskDependencies set to true, and this element is present, the request fails with error code TaskDependenciesNotSpecifiedOnJob. - :vartype depends_on: ~azure.batch.models.BatchTaskDependencies + :vartype depends_on: ~client.models.BatchTaskDependencies :ivar application_package_references: A list of Packages that the Batch service will deploy to the Compute Node before running the command line. Application packages are downloaded and deployed to a shared directory, not the Task working directory. Therefore, if a referenced package is already on the Node, and is up to date, then it is not re-downloaded; the existing copy on the Compute Node is used. If a referenced Package cannot be installed, for example because the package has been deleted or because download failed, the Task fails. - :vartype application_package_references: - list[~azure.batch.models.BatchApplicationPackageReference] + :vartype application_package_references: list[~client.models.BatchApplicationPackageReference] :ivar authentication_token_settings: The settings for an authentication token that the Task can use to perform Batch service operations. 
If this property is set, the Batch service provides the Task with an authentication token which can be used to authenticate Batch service @@ -7757,7 +7748,7 @@ class BatchTaskCreateContent(_model_base.Model): using the token depend on the settings. For example, a Task can request Job permissions in order to add other Tasks to the Job, or check the status of the Job or of other Tasks under the Job. - :vartype authentication_token_settings: ~azure.batch.models.AuthenticationTokenSettings + :vartype authentication_token_settings: ~client.models.AuthenticationTokenSettings """ id: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -7918,7 +7909,7 @@ class BatchTaskDependencies(_model_base.Model): :vartype task_ids: list[str] :ivar task_id_ranges: The list of Task ID ranges that this Task depends on. All Tasks in all ranges must complete successfully before the dependent Task can be scheduled. - :vartype task_id_ranges: list[~azure.batch.models.BatchTaskIdRange] + :vartype task_id_ranges: list[~client.models.BatchTaskIdRange] """ task_ids: Optional[List[str]] = rest_field( @@ -7976,10 +7967,10 @@ class BatchTaskExecutionInfo(_model_base.Model): :vartype exit_code: int :ivar container_info: Information about the container under which the Task is executing. This property is set only if the Task runs in a container context. - :vartype container_info: ~azure.batch.models.BatchTaskContainerExecutionInfo + :vartype container_info: ~client.models.BatchTaskContainerExecutionInfo :ivar failure_info: Information describing the Task failure, if any. This property is set only if the Task is in the completed state and encountered a failure. - :vartype failure_info: ~azure.batch.models.BatchTaskFailureInfo + :vartype failure_info: ~client.models.BatchTaskFailureInfo :ivar retry_count: The number of times the Task has been retried by the Batch service. 
Task application failures (non-zero exit code) are retried, pre-processing errors (the Task could not be run) and file upload errors are not retried. The Batch service will retry the Task up to @@ -8004,7 +7995,7 @@ class BatchTaskExecutionInfo(_model_base.Model): :ivar result: The result of the Task execution. If the value is 'failed', then the details of the failure can be found in the failureInfo property. Known values are: "success" and "failure". - :vartype result: str or ~azure.batch.models.BatchTaskExecutionResult + :vartype result: str or ~client.models.BatchTaskExecutionResult """ start_time: Optional[datetime.datetime] = rest_field( @@ -8099,7 +8090,7 @@ class BatchTaskFailureInfo(_model_base.Model): :ivar category: The category of the Task error. Required. Known values are: "usererror" and "servererror". - :vartype category: str or ~azure.batch.models.ErrorCategory + :vartype category: str or ~client.models.ErrorCategory :ivar code: An identifier for the Task error. Codes are invariant and are intended to be consumed programmatically. :vartype code: str @@ -8107,7 +8098,7 @@ class BatchTaskFailureInfo(_model_base.Model): user interface. :vartype message: str :ivar details: A list of additional details related to the error. - :vartype details: list[~azure.batch.models.NameValuePair] + :vartype details: list[~client.models.NameValuePair] """ category: Union[str, "_models.ErrorCategory"] = rest_field( @@ -8152,7 +8143,7 @@ class BatchTaskGroup(_model_base.Model): serialized size of this collection must be less than 1MB. If it is greater than 1MB (for example if each Task has 100's of resource files or environment variables), the request will fail with code 'RequestBodyTooLarge' and should be retried again with fewer Tasks. Required. 
- :vartype value: list[~azure.batch.models.BatchTaskCreateContent] + :vartype value: list[~client.models.BatchTaskCreateContent] """ value: List["_models.BatchTaskCreateContent"] = rest_field( @@ -8228,9 +8219,9 @@ class BatchTaskInfo(_model_base.Model): :vartype subtask_id: int :ivar task_state: The current state of the Task. Required. Known values are: "active", "preparing", "running", and "completed". - :vartype task_state: str or ~azure.batch.models.BatchTaskState + :vartype task_state: str or ~client.models.BatchTaskState :ivar execution_info: Information about the execution of the Task. - :vartype execution_info: ~azure.batch.models.BatchTaskExecutionInfo + :vartype execution_info: ~client.models.BatchTaskExecutionInfo """ task_url: Optional[str] = rest_field(name="taskUrl", visibility=["read", "create", "update", "delete", "query"]) @@ -8279,7 +8270,7 @@ class BatchTaskSchedulingPolicy(_model_base.Model): :ivar node_fill_type: How Tasks are distributed across Compute Nodes in a Pool. If not specified, the default is spread. Required. Known values are: "spread" and "pack". - :vartype node_fill_type: str or ~azure.batch.models.BatchNodeFillType + :vartype node_fill_type: str or ~client.models.BatchNodeFillType """ node_fill_type: Union[str, "_models.BatchNodeFillType"] = rest_field( @@ -8528,7 +8519,7 @@ class ContainerConfiguration(_model_base.Model): :ivar type: The container technology to be used. Required. Known values are: "dockerCompatible" and "criCompatible". - :vartype type: str or ~azure.batch.models.ContainerType + :vartype type: str or ~client.models.ContainerType :ivar container_image_names: The collection of container Image names. This is the full Image reference, as would be specified to "docker pull". An Image will be sourced from the default Docker registry unless the Image is fully qualified with an alternative registry. 
@@ -8536,7 +8527,7 @@ class ContainerConfiguration(_model_base.Model): :ivar container_registries: Additional private registries from which containers can be pulled. If any Images must be downloaded from a private registry which requires credentials, then those credentials must be provided here. - :vartype container_registries: list[~azure.batch.models.ContainerRegistryReference] + :vartype container_registries: list[~client.models.ContainerRegistryReference] """ type: Union[str, "_models.ContainerType"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -8580,7 +8571,7 @@ class ContainerHostBatchBindMountEntry(_model_base.Model): :ivar source: The path which be mounted to container customer can select. Known values are: "Shared", "Startup", "VfsMounts", "Task", "JobPrep", and "Applications". - :vartype source: str or ~azure.batch.models.ContainerHostDataPath + :vartype source: str or ~client.models.ContainerHostDataPath :ivar is_read_only: Mount this source path as read-only mode or not. Default value is false (read/write mode). For Linux, if you mount this path as a read/write mode, this does not mean that all users in container have the read/write access for the path, it depends on the access @@ -8632,7 +8623,7 @@ class ContainerRegistryReference(_model_base.Model): :vartype registry_server: str :ivar identity_reference: The reference to the user assigned identity to use to access an Azure Container Registry instead of username and password. - :vartype identity_reference: ~azure.batch.models.BatchNodeIdentityReference + :vartype identity_reference: ~client.models.BatchNodeIdentityReference """ username: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -8684,13 +8675,13 @@ class DataDisk(_model_base.Model): `https://blogs.msdn.microsoft.com/windowsazurestorage/2012/06/27/exploring-windows-azure-drives-disks-and-images/ `_. Known values are: "none", "readonly", and "readwrite". 
- :vartype caching: str or ~azure.batch.models.CachingType + :vartype caching: str or ~client.models.CachingType :ivar disk_size_gb: The initial disk size in gigabytes. Required. :vartype disk_size_gb: int :ivar storage_account_type: The storage Account type to be used for the data disk. If omitted, the default is "standard_lrs". Known values are: "standard_lrs", "premium_lrs", and "standardssd_lrs". - :vartype storage_account_type: str or ~azure.batch.models.StorageAccountType + :vartype storage_account_type: str or ~client.models.StorageAccountType """ logical_unit_number: int = rest_field(name="lun", visibility=["read", "create", "update", "delete", "query"]) @@ -8747,7 +8738,7 @@ class DeleteBatchCertificateError(_model_base.Model): error. This list includes details such as the active Pools and Compute Nodes referencing this Certificate. However, if a large number of resources reference the Certificate, the list contains only about the first hundred. - :vartype values_property: list[~azure.batch.models.NameValuePair] + :vartype values_property: list[~client.models.NameValuePair] """ code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -8798,7 +8789,7 @@ class DiffDiskSettings(_model_base.Model): `https://learn.microsoft.com/azure/virtual-machines/linux/ephemeral-os-disks#size-requirements `_. "cachedisk" - :vartype placement: str or ~azure.batch.models.DiffDiskPlacement + :vartype placement: str or ~client.models.DiffDiskPlacement """ placement: Optional[Union[str, "_models.DiffDiskPlacement"]] = rest_field( @@ -8840,7 +8831,7 @@ class DiskEncryptionConfiguration(_model_base.Model): :ivar targets: The list of disk targets Batch Service will encrypt on the compute node. The list of disk targets Batch Service will encrypt on the compute node. 
- :vartype targets: list[str or ~azure.batch.models.DiskEncryptionTarget] + :vartype targets: list[str or ~client.models.DiskEncryptionTarget] """ targets: Optional[List[Union[str, "_models.DiskEncryptionTarget"]]] = rest_field( @@ -8908,7 +8899,7 @@ class ExitCodeMapping(_model_base.Model): :vartype code: int :ivar exit_options: How the Batch service should respond if the Task exits with this exit code. Required. - :vartype exit_options: ~azure.batch.models.ExitOptions + :vartype exit_options: ~client.models.ExitOptions """ code: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -8947,7 +8938,7 @@ class ExitCodeRangeMapping(_model_base.Model): :vartype end: int :ivar exit_options: How the Batch service should respond if the Task exits with an exit code in the range start to end (inclusive). Required. - :vartype exit_options: ~azure.batch.models.ExitOptions + :vartype exit_options: ~client.models.ExitOptions """ start: int = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -8985,24 +8976,24 @@ class ExitConditions(_model_base.Model): :ivar exit_codes: A list of individual Task exit codes and how the Batch service should respond to them. - :vartype exit_codes: list[~azure.batch.models.ExitCodeMapping] + :vartype exit_codes: list[~client.models.ExitCodeMapping] :ivar exit_code_ranges: A list of Task exit code ranges and how the Batch service should respond to them. - :vartype exit_code_ranges: list[~azure.batch.models.ExitCodeRangeMapping] + :vartype exit_code_ranges: list[~client.models.ExitCodeRangeMapping] :ivar pre_processing_error: How the Batch service should respond if the Task fails to start due to an error. - :vartype pre_processing_error: ~azure.batch.models.ExitOptions + :vartype pre_processing_error: ~client.models.ExitOptions :ivar file_upload_error: How the Batch service should respond if a file upload error occurs. 
If the Task exited with an exit code that was specified via exitCodes or exitCodeRanges, and then encountered a file upload error, then the action specified by the exit code takes precedence. - :vartype file_upload_error: ~azure.batch.models.ExitOptions + :vartype file_upload_error: ~client.models.ExitOptions :ivar default: How the Batch service should respond if the Task fails with an exit condition not covered by any of the other properties. This value is used if the Task exits with any nonzero exit code not listed in the exitCodes or exitCodeRanges collection, with a pre-processing error if the preProcessingError property is not present, or with a file upload error if the fileUploadError property is not present. If you want non-default behavior on exit code 0, you must list it explicitly using the exitCodes or exitCodeRanges collection. - :vartype default: ~azure.batch.models.ExitOptions + :vartype default: ~client.models.ExitOptions """ exit_codes: Optional[List["_models.ExitCodeMapping"]] = rest_field( @@ -9063,12 +9054,12 @@ class ExitOptions(_model_base.Model): Task request fails with an invalid property value error; if you are calling the REST API directly, the HTTP status code is 400 (Bad Request). Known values are: "none", "disable", and "terminate". - :vartype job_action: str or ~azure.batch.models.BatchJobAction + :vartype job_action: str or ~client.models.BatchJobAction :ivar dependency_action: An action that the Batch service performs on Tasks that depend on this Task. Possible values are 'satisfy' (allowing dependent tasks to progress) and 'block' (dependent tasks continue to wait). Batch does not yet support cancellation of dependent tasks. Known values are: "satisfy" and "block". 
- :vartype dependency_action: str or ~azure.batch.models.DependencyAction + :vartype dependency_action: str or ~client.models.DependencyAction """ job_action: Optional[Union[str, "_models.BatchJobAction"]] = rest_field( @@ -9166,88 +9157,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) -class GetCertificateResponse(_model_base.Model): - """GetCertificateResponse. - - :ivar thumbprint: The X.509 thumbprint of the Certificate. This is a sequence of up to 40 hex - digits (it may include spaces but these are removed). Required. - :vartype thumbprint: str - :ivar thumbprint_algorithm: The algorithm used to derive the thumbprint. This must be sha1. - Required. - :vartype thumbprint_algorithm: str - :ivar url: The URL of the Certificate. - :vartype url: str - :ivar state: The state of the Certificate. Known values are: "active", "deleting", and - "deletefailed". - :vartype state: str or ~azure.batch.models.BatchCertificateState - :ivar state_transition_time: The time at which the Certificate entered its current state. - :vartype state_transition_time: ~datetime.datetime - :ivar previous_state: The previous state of the Certificate. This property is not set if the - Certificate is in its initial active state. Known values are: "active", "deleting", and - "deletefailed". - :vartype previous_state: str or ~azure.batch.models.BatchCertificateState - :ivar previous_state_transition_time: The time at which the Certificate entered its previous - state. This property is not set if the Certificate is in its initial Active state. - :vartype previous_state_transition_time: ~datetime.datetime - :ivar public_data: The public part of the Certificate as a base-64 encoded .cer file. - :vartype public_data: str - :ivar delete_certificate_error: The error that occurred on the last attempt to delete this - Certificate. This property is set only if the Certificate is in the DeleteFailed state. 
- :vartype delete_certificate_error: ~azure.batch.models.DeleteBatchCertificateError - """ - - thumbprint: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) - """The X.509 thumbprint of the Certificate. This is a sequence of up to 40 hex digits (it may - include spaces but these are removed). Required.""" - thumbprint_algorithm: str = rest_field( - name="thumbprintAlgorithm", visibility=["read", "create", "update", "delete", "query"] - ) - """The algorithm used to derive the thumbprint. This must be sha1. Required.""" - url: Optional[str] = rest_field(visibility=["read"]) - """The URL of the Certificate.""" - state: Optional[Union[str, "_models.BatchCertificateState"]] = rest_field(visibility=["read"]) - """The state of the Certificate. Known values are: \"active\", \"deleting\", and \"deletefailed\".""" - state_transition_time: Optional[datetime.datetime] = rest_field( - name="stateTransitionTime", visibility=["read"], format="rfc3339" - ) - """The time at which the Certificate entered its current state.""" - previous_state: Optional[Union[str, "_models.BatchCertificateState"]] = rest_field( - name="previousState", visibility=["read"] - ) - """The previous state of the Certificate. This property is not set if the Certificate is in its - initial active state. Known values are: \"active\", \"deleting\", and \"deletefailed\".""" - previous_state_transition_time: Optional[datetime.datetime] = rest_field( - name="previousStateTransitionTime", visibility=["read"], format="rfc3339" - ) - """The time at which the Certificate entered its previous state. 
This property is not set if the - Certificate is in its initial Active state.""" - public_data: Optional[str] = rest_field(name="publicData", visibility=["read"]) - """The public part of the Certificate as a base-64 encoded .cer file.""" - delete_certificate_error: Optional["_models.DeleteBatchCertificateError"] = rest_field( - name="deleteCertificateError", visibility=["read"] - ) - """The error that occurred on the last attempt to delete this Certificate. This property is set - only if the Certificate is in the DeleteFailed state.""" - - @overload - def __init__( - self, - *, - thumbprint: str, - thumbprint_algorithm: str, - ) -> None: ... - - @overload - def __init__(self, mapping: Mapping[str, Any]) -> None: - """ - :param mapping: raw JSON to initialize the model. - :type mapping: Mapping[str, Any] - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - class HttpHeader(_model_base.Model): """An HTTP header name-value pair. @@ -9397,7 +9306,7 @@ class InboundEndpoint(_model_base.Model): :ivar name: The name of the endpoint. Required. :vartype name: str :ivar protocol: The protocol of the endpoint. Required. Known values are: "tcp" and "udp". - :vartype protocol: str or ~azure.batch.models.InboundEndpointProtocol + :vartype protocol: str or ~client.models.InboundEndpointProtocol :ivar public_ip_address: The public IP address of the Compute Node. Required. :vartype public_ip_address: str :ivar public_fqdn: The public fully qualified domain name for the Compute Node. Required. @@ -9458,7 +9367,7 @@ class InboundNatPool(_model_base.Model): values are provided the request fails with HTTP status code 400. Required. :vartype name: str :ivar protocol: The protocol of the endpoint. Required. Known values are: "tcp" and "udp". - :vartype protocol: str or ~azure.batch.models.InboundEndpointProtocol + :vartype protocol: str or ~client.models.InboundEndpointProtocol :ivar backend_port: The port number on the Compute Node. 
This must be unique within a Batch Pool. Acceptable values are between 1 and 65535 except for 29876 and 29877 as these are reserved. If any reserved values are provided the request fails with HTTP status code 400. @@ -9483,7 +9392,7 @@ class InboundNatPool(_model_base.Model): a Batch Pool is 25. If no network security group rules are specified, a default rule will be created to allow inbound access to the specified backendPort. If the maximum number of network security group rules is exceeded the request fails with HTTP status code 400. - :vartype network_security_group_rules: list[~azure.batch.models.NetworkSecurityGroupRule] + :vartype network_security_group_rules: list[~client.models.NetworkSecurityGroupRule] """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -9556,7 +9465,7 @@ class InstanceViewStatus(_model_base.Model): :ivar display_status: The localized label for the status. :vartype display_status: str :ivar level: Level code. Known values are: "Error", "Info", and "Warning". - :vartype level: str or ~azure.batch.models.StatusLevelTypes + :vartype level: str or ~client.models.StatusLevelTypes :ivar message: The detailed status message. :vartype message: str :ivar time: The time of the status. @@ -9662,9 +9571,9 @@ class ManagedDisk(_model_base.Model): :ivar storage_account_type: The storage account type for managed disk. Known values are: "standard_lrs", "premium_lrs", and "standardssd_lrs". - :vartype storage_account_type: str or ~azure.batch.models.StorageAccountType + :vartype storage_account_type: str or ~client.models.StorageAccountType :ivar security_profile: Specifies the security profile settings for the managed disk. 
- :vartype security_profile: ~azure.batch.models.VMDiskSecurityProfile + :vartype security_profile: ~client.models.VMDiskSecurityProfile """ storage_account_type: Optional[Union[str, "_models.StorageAccountType"]] = rest_field( @@ -9735,17 +9644,16 @@ class MountConfiguration(_model_base.Model): :ivar azure_blob_file_system_configuration: The Azure Storage Container to mount using blob FUSE on each node. This property is mutually exclusive with all other properties. - :vartype azure_blob_file_system_configuration: - ~azure.batch.models.AzureBlobFileSystemConfiguration + :vartype azure_blob_file_system_configuration: ~client.models.AzureBlobFileSystemConfiguration :ivar nfs_mount_configuration: The NFS file system to mount on each node. This property is mutually exclusive with all other properties. - :vartype nfs_mount_configuration: ~azure.batch.models.NfsMountConfiguration + :vartype nfs_mount_configuration: ~client.models.NfsMountConfiguration :ivar cifs_mount_configuration: The CIFS/SMB file system to mount on each node. This property is mutually exclusive with all other properties. - :vartype cifs_mount_configuration: ~azure.batch.models.CifsMountConfiguration + :vartype cifs_mount_configuration: ~client.models.CifsMountConfiguration :ivar azure_file_share_configuration: The Azure File Share to mount on each node. This property is mutually exclusive with all other properties. - :vartype azure_file_share_configuration: ~azure.batch.models.AzureFileShareConfiguration + :vartype azure_file_share_configuration: ~client.models.AzureFileShareConfiguration """ azure_blob_file_system_configuration: Optional["_models.AzureBlobFileSystemConfiguration"] = rest_field( @@ -9814,7 +9722,7 @@ class MultiInstanceSettings(_model_base.Model): response error code will be RequestEntityTooLarge. If this occurs, the collection of ResourceFiles must be reduced in size. This can be achieved using .zip files, Application Packages, or Docker Containers. 
- :vartype common_resource_files: list[~azure.batch.models.ResourceFile] + :vartype common_resource_files: list[~client.models.ResourceFile] """ number_of_instances: Optional[int] = rest_field( @@ -9917,13 +9825,13 @@ class NetworkConfiguration(_model_base.Model): :vartype subnet_id: str :ivar dynamic_v_net_assignment_scope: The scope of dynamic vnet assignment. Known values are: "none" and "job". - :vartype dynamic_v_net_assignment_scope: str or ~azure.batch.models.DynamicVNetAssignmentScope + :vartype dynamic_v_net_assignment_scope: str or ~client.models.DynamicVNetAssignmentScope :ivar endpoint_configuration: The configuration for endpoints on Compute Nodes in the Batch Pool. - :vartype endpoint_configuration: ~azure.batch.models.BatchPoolEndpointConfiguration + :vartype endpoint_configuration: ~client.models.BatchPoolEndpointConfiguration :ivar public_ip_address_configuration: The Public IPAddress configuration for Compute Nodes in the Batch Pool. - :vartype public_ip_address_configuration: ~azure.batch.models.PublicIpAddressConfiguration + :vartype public_ip_address_configuration: ~client.models.PublicIpAddressConfiguration :ivar enable_accelerated_networking: Whether this pool should enable accelerated networking. Accelerated networking enables single root I/O virtualization (SR-IOV) to a VM, which may lead to improved networking performance. For more details, see: @@ -10006,7 +9914,7 @@ class NetworkSecurityGroupRule(_model_base.Model): :vartype priority: int :ivar access: The action that should be taken for a specified IP address, subnet range or tag. Required. Known values are: "allow" and "deny". - :vartype access: str or ~azure.batch.models.NetworkSecurityGroupRuleAccess + :vartype access: str or ~client.models.NetworkSecurityGroupRuleAccess :ivar source_address_prefix: The source address prefix or tag to match for the rule. Valid values are a single IP address (i.e. 10.10.10.10), IP subnet (i.e. 192.168.1.0/24), default tag, or * (for all addresses). 
If any other values are provided the request fails with HTTP @@ -10121,15 +10029,15 @@ class OSDisk(_model_base.Model): :ivar ephemeral_os_disk_settings: Specifies the ephemeral Disk Settings for the operating system disk used by the compute node (VM). - :vartype ephemeral_os_disk_settings: ~azure.batch.models.DiffDiskSettings + :vartype ephemeral_os_disk_settings: ~client.models.DiffDiskSettings :ivar caching: Specifies the caching requirements. Possible values are: None, ReadOnly, ReadWrite. The default values are: None for Standard storage. ReadOnly for Premium storage. Known values are: "none", "readonly", and "readwrite". - :vartype caching: str or ~azure.batch.models.CachingType + :vartype caching: str or ~client.models.CachingType :ivar disk_size_gb: The initial disk size in GB when creating new OS disk. :vartype disk_size_gb: int :ivar managed_disk: The managed disk parameters. - :vartype managed_disk: ~azure.batch.models.ManagedDisk + :vartype managed_disk: ~client.models.ManagedDisk :ivar write_accelerator_enabled: Specifies whether writeAccelerator should be enabled or disabled on the disk. :vartype write_accelerator_enabled: bool @@ -10202,10 +10110,10 @@ class OutputFile(_model_base.Model): Required. :vartype file_pattern: str :ivar destination: The destination for the output file(s). Required. - :vartype destination: ~azure.batch.models.OutputFileDestination + :vartype destination: ~client.models.OutputFileDestination :ivar upload_options: Additional options for the upload operation, including under what conditions to perform the upload. Required. 
- :vartype upload_options: ~azure.batch.models.OutputFileUploadConfig + :vartype upload_options: ~client.models.OutputFileUploadConfig """ file_pattern: str = rest_field(name="filePattern", visibility=["read", "create", "update", "delete", "query"]) @@ -10269,13 +10177,13 @@ class OutputFileBlobContainerDestination(_model_base.Model): :ivar identity_reference: The reference to the user assigned identity to use to access Azure Blob Storage specified by containerUrl. The identity must have write access to the Azure Blob Storage container. - :vartype identity_reference: ~azure.batch.models.BatchNodeIdentityReference + :vartype identity_reference: ~client.models.BatchNodeIdentityReference :ivar upload_headers: A list of name-value pairs for headers to be used in uploading output files. These headers will be specified when uploading files to Azure Storage. Official document on allowed headers when uploading blobs: `https://learn.microsoft.com/rest/api/storageservices/put-blob#request-headers-all-blob-types `_. - :vartype upload_headers: list[~azure.batch.models.HttpHeader] + :vartype upload_headers: list[~client.models.HttpHeader] """ path: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -10328,7 +10236,7 @@ class OutputFileDestination(_model_base.Model): """The destination to which a file should be uploaded. :ivar container: A location in Azure blob storage to which files are uploaded. - :vartype container: ~azure.batch.models.OutputFileBlobContainerDestination + :vartype container: ~client.models.OutputFileBlobContainerDestination """ container: Optional["_models.OutputFileBlobContainerDestination"] = rest_field( @@ -10361,7 +10269,7 @@ class OutputFileUploadConfig(_model_base.Model): :ivar upload_condition: The conditions under which the Task output file or set of files should be uploaded. The default is taskcompletion. Required. Known values are: "tasksuccess", "taskfailure", and "taskcompletion". 
- :vartype upload_condition: str or ~azure.batch.models.OutputFileUploadCondition + :vartype upload_condition: str or ~client.models.OutputFileUploadCondition """ upload_condition: Union[str, "_models.OutputFileUploadCondition"] = rest_field( @@ -10395,7 +10303,7 @@ class PublicIpAddressConfiguration(_model_base.Model): :ivar ip_address_provisioning_type: The provisioning type for Public IP Addresses for the Pool. The default value is BatchManaged. Known values are: "batchmanaged", "usermanaged", and "nopublicipaddresses". - :vartype ip_address_provisioning_type: str or ~azure.batch.models.IpAddressProvisioningType + :vartype ip_address_provisioning_type: str or ~client.models.IpAddressProvisioningType :ivar ip_address_ids: The list of public IPs which the Batch service will use when provisioning Compute Nodes. The number of IPs specified here limits the maximum size of the Pool - 100 dedicated nodes or 100 Spot/Low-priority nodes can be allocated for each public IP. For @@ -10482,7 +10390,7 @@ class ResizeError(_model_base.Model): in a user interface. :vartype message: str :ivar values_property: A list of additional error details related to the Pool resize error. - :vartype values_property: list[~azure.batch.models.NameValuePair] + :vartype values_property: list[~client.models.NameValuePair] """ code: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -10559,7 +10467,7 @@ class ResourceFile(_model_base.Model): :vartype file_mode: str :ivar identity_reference: The reference to the user assigned identity to use to access Azure Blob Storage specified by storageContainerUrl or httpUrl. 
- :vartype identity_reference: ~azure.batch.models.BatchNodeIdentityReference + :vartype identity_reference: ~client.models.BatchNodeIdentityReference """ auto_storage_container_name: Optional[str] = rest_field( @@ -10761,11 +10669,11 @@ class SecurityProfile(_model_base.Model): :ivar security_type: Specifies the SecurityType of the virtual machine. It has to be set to any specified value to enable UefiSettings. Required. Known values are: "trustedLaunch" and "confidentialVM". - :vartype security_type: str or ~azure.batch.models.SecurityTypes + :vartype security_type: str or ~client.models.SecurityTypes :ivar uefi_settings: Specifies the security settings like secure boot and vTPM used while creating the virtual machine. Specifies the security settings like secure boot and vTPM used while creating the virtual machine. Required. - :vartype uefi_settings: ~azure.batch.models.UefiSettings + :vartype uefi_settings: ~client.models.UefiSettings """ encryption_at_host: bool = rest_field( @@ -10893,13 +10801,13 @@ class UpgradePolicy(_model_base.Model): **Automatic** - All virtual machines in the scale set are automatically updated at the same time.

**Rolling** - Scale set performs updates in batches with an optional pause time in between. Required. Known values are: "automatic", "manual", and "rolling". - :vartype mode: str or ~azure.batch.models.UpgradeMode + :vartype mode: str or ~client.models.UpgradeMode :ivar automatic_os_upgrade_policy: Configuration parameters used for performing automatic OS Upgrade. The configuration parameters used for performing automatic OS upgrade. - :vartype automatic_os_upgrade_policy: ~azure.batch.models.AutomaticOsUpgradePolicy + :vartype automatic_os_upgrade_policy: ~client.models.AutomaticOsUpgradePolicy :ivar rolling_upgrade_policy: The configuration parameters used while performing a rolling upgrade. - :vartype rolling_upgrade_policy: ~azure.batch.models.RollingUpgradePolicy + :vartype rolling_upgrade_policy: ~client.models.RollingUpgradePolicy """ mode: Union[str, "_models.UpgradeMode"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -10962,7 +10870,7 @@ class UploadBatchServiceLogsContent(_model_base.Model): :ivar identity_reference: The reference to the user assigned identity to use to access Azure Blob Storage specified by containerUrl. The identity must have write access to the Azure Blob Storage container. - :vartype identity_reference: ~azure.batch.models.BatchNodeIdentityReference + :vartype identity_reference: ~client.models.BatchNodeIdentityReference """ container_url: str = rest_field(name="containerUrl", visibility=["read", "create", "update", "delete", "query"]) @@ -11066,15 +10974,15 @@ class UserAccount(_model_base.Model): :vartype password: str :ivar elevation_level: The elevation level of the user Account. The default value is nonAdmin. Known values are: "nonadmin" and "admin". - :vartype elevation_level: str or ~azure.batch.models.ElevationLevel + :vartype elevation_level: str or ~client.models.ElevationLevel :ivar linux_user_configuration: The Linux-specific user configuration for the user Account. 
This property is ignored if specified on a Windows Pool. If not specified, the user is created with the default options. - :vartype linux_user_configuration: ~azure.batch.models.LinuxUserConfiguration + :vartype linux_user_configuration: ~client.models.LinuxUserConfiguration :ivar windows_user_configuration: The Windows-specific user configuration for the user Account. This property can only be specified if the user is on a Windows Pool. If not specified and on a Windows Pool, the user is created with the default options. - :vartype windows_user_configuration: ~azure.batch.models.WindowsUserConfiguration + :vartype windows_user_configuration: ~client.models.WindowsUserConfiguration """ name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -11166,7 +11074,7 @@ class UserIdentity(_model_base.Model): :vartype username: str :ivar auto_user: The auto user under which the Task is run. The userName and autoUser properties are mutually exclusive; you must specify one but not both. - :vartype auto_user: ~azure.batch.models.AutoUserSpecification + :vartype auto_user: ~client.models.AutoUserSpecification """ username: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -11203,7 +11111,7 @@ class VirtualMachineConfiguration(_model_base.Model): :ivar image_reference: A reference to the Azure Virtual Machines Marketplace Image or the custom Virtual Machine Image to use. Required. - :vartype image_reference: ~azure.batch.models.ImageReference + :vartype image_reference: ~client.models.ImageReference :ivar node_agent_sku_id: The SKU of the Batch Compute Node agent to be provisioned on Compute Nodes in the Pool. 
The Batch Compute Node agent is a program that runs on each Compute Node in the Pool, and provides the command-and-control interface between the Compute Node and the Batch @@ -11215,7 +11123,7 @@ class VirtualMachineConfiguration(_model_base.Model): :vartype node_agent_sku_id: str :ivar windows_configuration: Windows operating system settings on the virtual machine. This property must not be specified if the imageReference property specifies a Linux OS Image. - :vartype windows_configuration: ~azure.batch.models.WindowsConfiguration + :vartype windows_configuration: ~client.models.WindowsConfiguration :ivar data_disks: The configuration for data disks attached to the Compute Nodes in the Pool. This property must be specified if the Compute Nodes in the Pool need to have empty data disks attached to them. This cannot be updated. Each Compute Node gets its own disk (the disk is not @@ -11228,7 +11136,7 @@ class VirtualMachineConfiguration(_model_base.Model): and `https://learn.microsoft.com/azure/virtual-machines/windows/attach-disk-ps#add-an-empty-data-disk-to-a-virtual-machine `_. - :vartype data_disks: list[~azure.batch.models.DataDisk] + :vartype data_disks: list[~client.models.DataDisk] :ivar license_type: This only applies to Images that contain the Windows operating system, and should only be used when you hold valid on-premises licenses for the Compute Nodes which will be deployed. If omitted, no on-premises licensing discount is @@ -11242,26 +11150,26 @@ class VirtualMachineConfiguration(_model_base.Model): performed on each Compute Node in the Pool to allow Tasks to run in containers. All regular Tasks and Job manager Tasks run on this Pool must specify the containerSettings property, and all other Tasks may specify it. - :vartype container_configuration: ~azure.batch.models.ContainerConfiguration + :vartype container_configuration: ~client.models.ContainerConfiguration :ivar disk_encryption_configuration: The disk encryption configuration for the pool. 
If specified, encryption is performed on each node in the pool during node provisioning. - :vartype disk_encryption_configuration: ~azure.batch.models.DiskEncryptionConfiguration + :vartype disk_encryption_configuration: ~client.models.DiskEncryptionConfiguration :ivar node_placement_configuration: The node placement configuration for the pool. This configuration will specify rules on how nodes in the pool will be physically allocated. - :vartype node_placement_configuration: ~azure.batch.models.BatchNodePlacementConfiguration + :vartype node_placement_configuration: ~client.models.BatchNodePlacementConfiguration :ivar extensions: The virtual machine extension for the pool. If specified, the extensions mentioned in this configuration will be installed on each node. - :vartype extensions: list[~azure.batch.models.VMExtension] + :vartype extensions: list[~client.models.VMExtension] :ivar os_disk: Settings for the operating system disk of the Virtual Machine. - :vartype os_disk: ~azure.batch.models.OSDisk + :vartype os_disk: ~client.models.OSDisk :ivar security_profile: Specifies the security profile settings for the virtual machine or virtual machine scale set. - :vartype security_profile: ~azure.batch.models.SecurityProfile + :vartype security_profile: ~client.models.SecurityProfile :ivar service_artifact_reference: Specifies the service artifact reference id used to set same image version for all virtual machines in the scale set when using 'latest' image version. The service artifact reference id in the form of /subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Compute/galleries/{galleryName}/serviceArtifacts/{serviceArtifactName}/vmArtifactsProfiles/{vmArtifactsProfilesName}. 
- :vartype service_artifact_reference: ~azure.batch.models.ServiceArtifactReference + :vartype service_artifact_reference: ~client.models.ServiceArtifactReference """ image_reference: "_models.ImageReference" = rest_field( @@ -11379,7 +11287,7 @@ class VirtualMachineInfo(_model_base.Model): """Info about the current state of the virtual machine. :ivar image_reference: The reference to the Azure Virtual Machine's Marketplace Image. - :vartype image_reference: ~azure.batch.models.ImageReference + :vartype image_reference: ~client.models.ImageReference :ivar scale_set_vm_resource_id: The resource ID of the Compute Node's current Virtual Machine Scale Set VM. Only defined if the Batch Account was created with its poolAllocationMode property set to 'UserSubscription'. @@ -11424,7 +11332,7 @@ class VMDiskSecurityProfile(_model_base.Model): persisting firmware state in the VMGuestState blob. **Note**: It can be set for only Confidential VMs and is required when using Confidential VMs. Known values are: "NonPersistedTPM" and "VMGuestStateOnly". - :vartype security_encryption_type: str or ~azure.batch.models.SecurityEncryptionTypes + :vartype security_encryption_type: str or ~client.models.SecurityEncryptionTypes """ security_encryption_type: Optional[Union[str, "_models.SecurityEncryptionTypes"]] = rest_field( @@ -11547,9 +11455,9 @@ class VMExtensionInstanceView(_model_base.Model): :ivar name: The name of the vm extension instance view. :vartype name: str :ivar statuses: The resource status information. - :vartype statuses: list[~azure.batch.models.InstanceViewStatus] + :vartype statuses: list[~client.models.InstanceViewStatus] :ivar sub_statuses: The resource status information. 
- :vartype sub_statuses: list[~azure.batch.models.InstanceViewStatus] + :vartype sub_statuses: list[~client.models.InstanceViewStatus] """ name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) @@ -11620,7 +11528,7 @@ class WindowsUserConfiguration(_model_base.Model): :ivar login_mode: The login mode for the user. The default is 'batch'. Known values are: "batch" and "interactive". - :vartype login_mode: str or ~azure.batch.models.LoginMode + :vartype login_mode: str or ~client.models.LoginMode """ login_mode: Optional[Union[str, "_models.LoginMode"]] = rest_field( diff --git a/sdk/batch/azure-batch/azure/batch/models/_patch.py b/sdk/batch/azure-batch/azure/batch/models/_patch.py index 6435ce16c022..aeb7eed86afc 100644 --- a/sdk/batch/azure-batch/azure/batch/models/_patch.py +++ b/sdk/batch/azure-batch/azure/batch/models/_patch.py @@ -19,6 +19,7 @@ "BatchFileProperties", ] # Add all objects you want publicly available to users at this package level + class CreateTasksError(HttpResponseError): """Aggregate Exception containing details for any failures from a task add operation. @@ -62,8 +63,8 @@ def __init__(self, pending_tasks=[], failure_tasks=[], errors=[]): ) super(CreateTasksError, self).__init__(self.message) -class BatchFileProperties: +class BatchFileProperties: """Information about a file or directory on a Compute Node with additional properties. :ivar url: The URL of the file. @@ -84,7 +85,7 @@ class BatchFileProperties: :vartype file_mode: str """ - url: Optional[str] + url: Optional[str] """The URL of the file.""" is_directory: Optional[bool] """Whether the object represents a directory.""" @@ -119,6 +120,7 @@ def __init__( self.content_type = content_type self.file_mode = file_mode + def patch_sdk(): """Do not remove from this file. 
diff --git a/sdk/batch/azure-batch/client/__init__.py b/sdk/batch/azure-batch/client/__init__.py new file mode 100644 index 000000000000..a02f2bbd5c47 --- /dev/null +++ b/sdk/batch/azure-batch/client/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import BatchClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "BatchClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/batch/azure-batch/client/_client.py b/sdk/batch/azure-batch/client/_client.py new file mode 100644 index 000000000000..4ce5c282dbf4 --- /dev/null +++ b/sdk/batch/azure-batch/client/_client.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import PipelineClient +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse + +from ._configuration import BatchClientConfiguration +from ._operations import BatchClientOperationsMixin +from ._serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class BatchClient(BatchClientOperationsMixin): + """BatchClient. + + :param endpoint: Batch account endpoint (for example: + `https://batchaccount.eastus2.batch.azure.com + `_). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-07-01.20.0". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = BatchClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + kwargs["request_id_header_name"] = "client-request-id" + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/sdk/batch/azure-batch/client/_configuration.py b/sdk/batch/azure-batch/client/_configuration.py new file mode 100644 index 000000000000..556c0855eefa --- /dev/null +++ b/sdk/batch/azure-batch/client/_configuration.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class BatchClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for BatchClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Batch account endpoint (for example: + `https://batchaccount.eastus2.batch.azure.com + `_). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. 
+ :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-07-01.20.0". Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-07-01.20.0") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://batch.core.windows.net//.default"]) + kwargs.setdefault("sdk_moniker", "batch/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, 
**kwargs + ) diff --git a/sdk/batch/azure-batch/client/_model_base.py b/sdk/batch/azure-batch/client/_model_base.py new file mode 100644 index 000000000000..065b17f67c46 --- /dev/null +++ b/sdk/batch/azure-batch/client/_model_base.py @@ -0,0 +1,1236 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = 
o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> 
datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. 
+ :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: 
typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): # pylint: disable=unsubscriptable-object + def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> 
typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ + return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> typing.Tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. 
+ """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, 
"_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if 
prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) # pylint: disable=no-value-for-parameter + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and 
v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? 
+ try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? + if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often them most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + if annotation._name == "Dict": # pyright: ignore + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, 
deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + 
deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +def _failsafe_deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, value, module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + value: typing.Any, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, value) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, 
obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, 
exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + 
wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + 
if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/batch/azure-batch/client/_operations/__init__.py b/sdk/batch/azure-batch/client/_operations/__init__.py new file mode 100644 index 000000000000..ea39f177e86d --- /dev/null +++ b/sdk/batch/azure-batch/client/_operations/__init__.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import BatchClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "BatchClientOperationsMixin", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/batch/azure-batch/client/_operations/_operations.py b/sdk/batch/azure-batch/client/_operations/_operations.py new file mode 100644 index 000000000000..2a09e3f8b76e --- /dev/null +++ b/sdk/batch/azure-batch/client/_operations/_operations.py @@ -0,0 +1,11483 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import datetime +import json +import sys +from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, TypeVar +import urllib.parse + +from azure.core import MatchConditions +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from ...azure.batch import models as _azure_batch_models4 +from .._model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from .._serialization import Serializer +from .._vendor import BatchClientMixinABC, prep_if_match, prep_if_none_match + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_batch_list_applications_request( + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/applications" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, 
"str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_application_request( + application_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/applications/{applicationId}" + path_format_arguments = { + "applicationId": _SERIALIZER.url("application_id", application_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_pool_usage_metrics_request( # pylint: disable=name-too-long + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + starttime: Optional[datetime.datetime] = None, + endtime: Optional[datetime.datetime] = None, + filter: 
Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/poolusagemetrics" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if starttime is not None: + _params["startTime"] = _SERIALIZER.query("starttime", starttime, "iso-8601") + if endtime is not None: + _params["endtime"] = _SERIALIZER.query("endtime", endtime, "iso-8601") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_pool_request( + *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", 
timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_pools_request( + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_pool_request( + pool_id: str, + *, + timeout: Optional[int] = None, 
+ ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_pool_exists_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: 
Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_pool_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = 
None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", 
if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_update_pool_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, 
match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_disable_pool_auto_scale_request( # pylint: disable=name-too-long + pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/disableautoscale" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_enable_pool_auto_scale_request( # pylint: disable=name-too-long + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + 
content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/enableautoscale" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_evaluate_pool_auto_scale_request( # pylint: disable=name-too-long + pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = 
kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/evaluateautoscale" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_resize_pool_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/resize" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if 
timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_stop_pool_resize_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/stopresize" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_pool_properties_request( # pylint: disable=name-too-long + pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/updateproperties" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = 
_SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_remove_nodes_request( + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/removenodes" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["content-type"] = 
_SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_supported_images_request( # pylint: disable=name-too-long + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/supportedimages" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_pool_node_counts_request( # pylint: disable=name-too-long + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = 
None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/nodecounts" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # 
type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}" + 
path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_update_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = 
kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", 
{}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_disable_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = 
None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/disable" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_enable_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: 
Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/enable" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_terminate_job_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: 
Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/terminate" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, 
**kwargs) + + +def build_batch_create_job_request( + *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_jobs_request( + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = 
_SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_jobs_from_schedule_request( # pylint: disable=name-too-long + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}/jobs" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = 
_SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_job_preparation_and_release_task_status_request( # pylint: disable=name-too-long + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/jobpreparationandreleasetaskstatus" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, 
"str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_job_task_counts_request( + job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/taskcounts" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_certificate_request( + *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/certificates" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + 
# Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_certificates_request( + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/certificates" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_cancel_certificate_deletion_request( # pylint: disable=name-too-long + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) 
-> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/certificates(thumbprintAlgorithm={thumbprintAlgorithm},thumbprint={thumbprint})/canceldelete" + path_format_arguments = { + "thumbprintAlgorithm": _SERIALIZER.url("thumbprint_algorithm", thumbprint_algorithm, "str"), + "thumbprint": _SERIALIZER.url("thumbprint", thumbprint, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_certificate_request( + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/certificates(thumbprintAlgorithm={thumbprintAlgorithm},thumbprint={thumbprint})" + path_format_arguments = { + "thumbprintAlgorithm": _SERIALIZER.url("thumbprint_algorithm", thumbprint_algorithm, "str"), + "thumbprint": _SERIALIZER.url("thumbprint", thumbprint, "str"), + } + + _url: str = 
_url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_certificate_request( + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/certificates(thumbprintAlgorithm={thumbprintAlgorithm},thumbprint={thumbprint})" + path_format_arguments = { + "thumbprintAlgorithm": _SERIALIZER.url("thumbprint_algorithm", thumbprint_algorithm, "str"), + "thumbprint": _SERIALIZER.url("thumbprint", thumbprint, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, 
params=_params, headers=_headers, **kwargs) + + +def build_batch_job_schedule_exists_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return 
HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, 
match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = 
_SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_update_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + 
_headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = 
_SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_disable_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}/disable" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = 
_SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_enable_job_schedule_request( + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}/enable" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is 
not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_terminate_job_schedule_request( # pylint: disable=name-too-long + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules/{jobScheduleId}/terminate" + path_format_arguments = { + "jobScheduleId": _SERIALIZER.url("job_schedule_id", job_schedule_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct 
parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if force is not None: + _params["force"] = _SERIALIZER.query("force", force, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_job_schedule_request( + *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = 
_SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_job_schedules_request( + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobschedules" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_task_request( + job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + 
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_tasks_request( + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_task_collection_request( # pylint: disable=name-too-long + job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/addtaskcollection" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", 
content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = 
prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + if expand is not None: + _params["$expand"] = _SERIALIZER.query("expand", expand, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + 
_headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, 
"str") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_sub_tasks_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}/subtasksinfo" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", 
ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_terminate_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}/terminate" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = _SERIALIZER.header("if_match", if_match, "str") + if_none_match = 
prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_reactivate_task_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}/reactivate" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + if_match = prep_if_match(etag, match_condition) + if if_match is not None: + _headers["If-Match"] = 
_SERIALIZER.header("if_match", if_match, "str") + if_none_match = prep_if_none_match(etag, match_condition) + if if_none_match is not None: + _headers["If-None-Match"] = _SERIALIZER.header("if_none_match", if_none_match, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_task_file_request( + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + recursive: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}/files/{filePath}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + "filePath": _SERIALIZER.url("file_path", file_path, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if recursive is not None: + _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_task_file_request( + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: 
Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + ocp_range: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/octet-stream") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}/files/{filePath}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + "filePath": _SERIALIZER.url("file_path", file_path, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + if ocp_range is not None: + _headers["ocp-range"] = _SERIALIZER.header("ocp_range", ocp_range, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_task_file_properties_request( # pylint: disable=name-too-long + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: 
Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}/files/{filePath}" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + "filePath": _SERIALIZER.url("file_path", file_path, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + if if_modified_since is not None: + _headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123") + if if_unmodified_since is not None: + _headers["If-Unmodified-Since"] = _SERIALIZER.header("if_unmodified_since", if_unmodified_since, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="HEAD", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_list_task_files_request( + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + recursive: Optional[bool] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/jobs/{jobId}/tasks/{taskId}/files" + path_format_arguments = { + "jobId": _SERIALIZER.url("job_id", job_id, "str"), + "taskId": _SERIALIZER.url("task_id", task_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if max_results is not None: + _params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int") + if filter is not None: + _params["$filter"] = _SERIALIZER.query("filter", filter, "str") + if recursive is not None: + _params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_create_node_user_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/users" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_delete_node_user_request( + pool_id: str, + node_id: str, + user_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/users/{userName}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + "userName": _SERIALIZER.url("user_name", user_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_replace_node_user_request( + pool_id: str, + node_id: str, + user_name: str, + *, + timeout: 
Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/users/{userName}" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + "userName": _SERIALIZER.url("user_name", user_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_get_node_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}" + path_format_arguments = { + "poolId": 
_SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + if select is not None: + _params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_batch_reboot_node_request( + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/pools/{poolId}/nodes/{nodeId}/reboot" + path_format_arguments = { + "poolId": _SERIALIZER.url("pool_id", pool_id, "str"), + "nodeId": _SERIALIZER.url("node_id", node_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if timeout is not None: + _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int") + + # Construct headers + if ocpdate is not None: + _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123") + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] 
def _build_batch_node_request(
    method: str,
    url_template: str,
    path_format_arguments: dict,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    accept_default: str = "application/json",
    content_type: Optional[str] = None,
    extra_params: Optional[dict] = None,
    extra_headers: Optional[dict] = None,
    **kwargs: Any,
) -> HttpRequest:
    """Assemble an HttpRequest for a compute-node operation.

    Shared implementation for the ``build_batch_*_request`` helpers below, which
    all follow the same recipe: pop ``headers``/``params``/``api_version`` from
    *kwargs*, format the URL template, add the common ``api-version``/``timeOut``
    query parameters and ``ocp-date``/``content-type``/``Accept`` headers, and
    forward any remaining *kwargs* (e.g. ``content``) to ``HttpRequest``.

    :param method: HTTP method ("GET", "POST", "HEAD", "DELETE").
    :param url_template: URL path template with ``{placeholder}`` segments.
    :param path_format_arguments: Already-serialized values for the template.
    :keyword timeout: Optional ``timeOut`` query parameter value, in seconds.
    :keyword ocpdate: Optional value for the ``ocp-date`` header.
    :keyword accept_default: ``Accept`` header used when the caller supplied none.
    :keyword content_type: Value for the ``content-type`` header, if any.
    :keyword extra_params: Extra already-serialized query parameters.
    :keyword extra_headers: Extra already-serialized headers.
    :return: The assembled request.
    :rtype: ~azure.core.rest.HttpRequest
    """
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-07-01.20.0"))
    accept = _headers.pop("Accept", accept_default)

    # Construct URL
    _url: str = url_template.format(**path_format_arguments)

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
    if timeout is not None:
        _params["timeOut"] = _SERIALIZER.query("timeout", timeout, "int")
    if extra_params:
        _params.update(extra_params)

    # Construct headers
    if ocpdate is not None:
        _headers["ocp-date"] = _SERIALIZER.header("ocpdate", ocpdate, "rfc-1123")
    if extra_headers:
        _headers.update(extra_headers)
    if content_type is not None:
        _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method=method, url=_url, params=_params, headers=_headers, **kwargs)


def _node_path_args(pool_id: str, node_id: str) -> dict:
    """Serialized ``poolId``/``nodeId`` path arguments shared by the builders below."""
    return {
        "poolId": _SERIALIZER.url("pool_id", pool_id, "str"),
        "nodeId": _SERIALIZER.url("node_id", node_id, "str"),
    }


def build_batch_start_node_request(
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for starting the specified Compute Node."""
    return _build_batch_node_request(
        "POST",
        "/pools/{poolId}/nodes/{nodeId}/start",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        **kwargs,
    )


def build_batch_reimage_node_request(
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for reimaging the specified Compute Node."""
    # ``content_type`` is required here; KeyError on absence matches the original.
    content_type: str = kwargs.pop("content_type")
    return _build_batch_node_request(
        "POST",
        "/pools/{poolId}/nodes/{nodeId}/reimage",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        content_type=content_type,
        **kwargs,
    )


def build_batch_deallocate_node_request(
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for deallocating the specified Compute Node."""
    content_type: str = kwargs.pop("content_type")
    return _build_batch_node_request(
        "POST",
        "/pools/{poolId}/nodes/{nodeId}/deallocate",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        content_type=content_type,
        **kwargs,
    )


def build_batch_disable_node_scheduling_request(  # pylint: disable=name-too-long
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for disabling Task scheduling on the specified Compute Node."""
    content_type: str = kwargs.pop("content_type")
    return _build_batch_node_request(
        "POST",
        "/pools/{poolId}/nodes/{nodeId}/disablescheduling",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        content_type=content_type,
        **kwargs,
    )


def build_batch_enable_node_scheduling_request(  # pylint: disable=name-too-long
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for enabling Task scheduling on the specified Compute Node."""
    return _build_batch_node_request(
        "POST",
        "/pools/{poolId}/nodes/{nodeId}/enablescheduling",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        **kwargs,
    )


def build_batch_get_node_remote_login_settings_request(  # pylint: disable=name-too-long
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for getting the remote login settings of the specified Compute Node."""
    return _build_batch_node_request(
        "GET",
        "/pools/{poolId}/nodes/{nodeId}/remoteloginsettings",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        **kwargs,
    )


def build_batch_upload_node_logs_request(
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for uploading Batch service logs from the specified Compute Node."""
    content_type: str = kwargs.pop("content_type")
    return _build_batch_node_request(
        "POST",
        "/pools/{poolId}/nodes/{nodeId}/uploadbatchservicelogs",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        content_type=content_type,
        **kwargs,
    )


def build_batch_list_nodes_request(
    pool_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    max_results: Optional[int] = None,
    filter: Optional[str] = None,
    select: Optional[List[str]] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for listing the Compute Nodes in the specified Pool."""
    extra_params: dict = {}
    if max_results is not None:
        extra_params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int")
    if filter is not None:
        extra_params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if select is not None:
        extra_params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",")
    return _build_batch_node_request(
        "GET",
        "/pools/{poolId}/nodes",
        {"poolId": _SERIALIZER.url("pool_id", pool_id, "str")},
        timeout=timeout,
        ocpdate=ocpdate,
        extra_params=extra_params,
        **kwargs,
    )


def build_batch_get_node_extension_request(
    pool_id: str,
    node_id: str,
    extension_name: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    select: Optional[List[str]] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for getting the specified Compute Node extension."""
    path_args = _node_path_args(pool_id, node_id)
    path_args["extensionName"] = _SERIALIZER.url("extension_name", extension_name, "str")
    extra_params: dict = {}
    if select is not None:
        extra_params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",")
    return _build_batch_node_request(
        "GET",
        "/pools/{poolId}/nodes/{nodeId}/extensions/{extensionName}",
        path_args,
        timeout=timeout,
        ocpdate=ocpdate,
        extra_params=extra_params,
        **kwargs,
    )


def build_batch_list_node_extensions_request(
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    max_results: Optional[int] = None,
    select: Optional[List[str]] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for listing the extensions of the specified Compute Node."""
    extra_params: dict = {}
    if max_results is not None:
        extra_params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int")
    if select is not None:
        extra_params["$select"] = _SERIALIZER.query("select", select, "[str]", div=",")
    return _build_batch_node_request(
        "GET",
        "/pools/{poolId}/nodes/{nodeId}/extensions",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        extra_params=extra_params,
        **kwargs,
    )


def build_batch_delete_node_file_request(
    pool_id: str,
    node_id: str,
    file_path: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    recursive: Optional[bool] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for deleting the specified file from the Compute Node."""
    path_args = _node_path_args(pool_id, node_id)
    path_args["filePath"] = _SERIALIZER.url("file_path", file_path, "str")
    extra_params: dict = {}
    if recursive is not None:
        extra_params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool")
    return _build_batch_node_request(
        "DELETE",
        "/pools/{poolId}/nodes/{nodeId}/files/{filePath}",
        path_args,
        timeout=timeout,
        ocpdate=ocpdate,
        extra_params=extra_params,
        **kwargs,
    )


def build_batch_get_node_file_request(
    pool_id: str,
    node_id: str,
    file_path: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    if_modified_since: Optional[datetime.datetime] = None,
    if_unmodified_since: Optional[datetime.datetime] = None,
    ocp_range: Optional[str] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for downloading the content of the specified node file."""
    path_args = _node_path_args(pool_id, node_id)
    path_args["filePath"] = _SERIALIZER.url("file_path", file_path, "str")
    extra_headers: dict = {}
    if if_modified_since is not None:
        extra_headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
    if if_unmodified_since is not None:
        extra_headers["If-Unmodified-Since"] = _SERIALIZER.header(
            "if_unmodified_since", if_unmodified_since, "rfc-1123"
        )
    if ocp_range is not None:
        extra_headers["ocp-range"] = _SERIALIZER.header("ocp_range", ocp_range, "str")
    return _build_batch_node_request(
        "GET",
        "/pools/{poolId}/nodes/{nodeId}/files/{filePath}",
        path_args,
        timeout=timeout,
        ocpdate=ocpdate,
        accept_default="application/octet-stream",  # file content, not JSON
        extra_headers=extra_headers,
        **kwargs,
    )


def build_batch_get_node_file_properties_request(  # pylint: disable=name-too-long
    pool_id: str,
    node_id: str,
    file_path: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    if_modified_since: Optional[datetime.datetime] = None,
    if_unmodified_since: Optional[datetime.datetime] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the (HEAD) request for getting the properties of the specified node file."""
    path_args = _node_path_args(pool_id, node_id)
    path_args["filePath"] = _SERIALIZER.url("file_path", file_path, "str")
    extra_headers: dict = {}
    if if_modified_since is not None:
        extra_headers["If-Modified-Since"] = _SERIALIZER.header("if_modified_since", if_modified_since, "rfc-1123")
    if if_unmodified_since is not None:
        extra_headers["If-Unmodified-Since"] = _SERIALIZER.header(
            "if_unmodified_since", if_unmodified_since, "rfc-1123"
        )
    return _build_batch_node_request(
        "HEAD",
        "/pools/{poolId}/nodes/{nodeId}/files/{filePath}",
        path_args,
        timeout=timeout,
        ocpdate=ocpdate,
        extra_headers=extra_headers,
        **kwargs,
    )


def build_batch_list_node_files_request(
    pool_id: str,
    node_id: str,
    *,
    timeout: Optional[int] = None,
    ocpdate: Optional[datetime.datetime] = None,
    max_results: Optional[int] = None,
    filter: Optional[str] = None,
    recursive: Optional[bool] = None,
    **kwargs: Any
) -> HttpRequest:
    """Build the request for listing the files on the specified Compute Node."""
    extra_params: dict = {}
    if max_results is not None:
        extra_params["maxresults"] = _SERIALIZER.query("max_results", max_results, "int")
    if filter is not None:
        extra_params["$filter"] = _SERIALIZER.query("filter", filter, "str")
    if recursive is not None:
        extra_params["recursive"] = _SERIALIZER.query("recursive", recursive, "bool")
    return _build_batch_node_request(
        "GET",
        "/pools/{poolId}/nodes/{nodeId}/files",
        _node_path_args(pool_id, node_id),
        timeout=timeout,
        ocpdate=ocpdate,
        extra_params=extra_params,
        **kwargs,
    )
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :return: An iterator like instance of BatchApplication + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchApplication] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchApplication]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_applications_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = 
HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchApplication], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_application( + self, + application_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchApplication: + """Gets information about the specified Application. + + This operation returns only Applications and versions that are available for + use on Compute Nodes; that is, that can be used in an Package reference. For + administrator information about Applications and versions that are not yet + available to Compute Nodes, use the Azure portal or the Azure Resource Manager + API. + + :param application_id: The ID of the Application. Required. 
+ :type application_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchApplication. The BatchApplication is compatible with MutableMapping + :rtype: ~client.models.BatchApplication + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchApplication] = kwargs.pop("cls", None) + + _request = build_batch_get_application_request( + application_id=application_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchApplication, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_pool_usage_metrics( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + starttime: Optional[datetime.datetime] = None, + endtime: Optional[datetime.datetime] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchPoolUsageMetrics"]: + """Lists the usage metrics, aggregated by Pool across individual time intervals, + for the specified Account. + + If you do not specify a $filter clause including a poolId, the response + includes all Pools that existed in the Account in the time range of the + returned aggregation intervals. If you do not specify a $filter clause + including a startTime or endTime these filters default to the start and end + times of the last aggregation interval currently available; that is, only the + last aggregation interval is returned. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword starttime: The earliest time from which to include metrics. This must be at least two + and + a half hours before the current time. If not specified this defaults to the + start time of the last aggregation interval currently available. Default value is None. + :paramtype starttime: ~datetime.datetime + :keyword endtime: The latest time from which to include metrics. This must be at least two + hours + before the current time. If not specified this defaults to the end time of the + last aggregation interval currently available. Default value is None. + :paramtype endtime: ~datetime.datetime + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics + `_. + Default value is None. 
+ :paramtype filter: str + :return: An iterator like instance of BatchPoolUsageMetrics + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchPoolUsageMetrics] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchPoolUsageMetrics]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pool_usage_metrics_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + starttime=starttime, + endtime=endtime, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchPoolUsageMetrics], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def create_pool( # pylint: disable=inconsistent-return-statements + self, + pool: _azure_batch_models4.BatchPoolCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Pool to the specified Account. + + When naming Pools, avoid including sensitive information such as user names or + secret project names. This information may appear in telemetry logs accessible + to Microsoft Support engineers. + + :param pool: The Pool to be created. Required. + :type pool: ~client.models.BatchPoolCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_pool_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] 
= self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_pools( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchPool"]: + """Lists all of the Pools which be mounted. + + Lists all of the Pools which be mounted. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-pools + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. 
+ :paramtype expand: list[str] + :return: An iterator like instance of BatchPool + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchPool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchPool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pools_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_azure_batch_models4.BatchPool], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def delete_pool( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Pool from the specified Account. + + When you request that a Pool be deleted, the following actions occur: the Pool + state is set to deleting; any ongoing resize operation on the Pool are stopped; + the Batch service starts resizing the Pool to zero Compute Nodes; any Tasks + running on existing Compute Nodes are terminated and requeued (as if a resize + Pool operation had been requested with the default requeue option); finally, + the Pool is removed from the system. Because running Tasks are requeued, the + user can rerun these Tasks by updating their Job to target a different Pool. + The Tasks can then run on the new Pool. 
If you want to override the requeue + behavior, then you should call resize Pool explicitly to shrink the Pool to + zero size before deleting the Pool. If you call an Update, Patch or Delete API + on a Pool in the deleting state, it will fail with HTTP status code 409 with + error code PoolBeingDeleted. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", 
response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def pool_exists( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Gets basic properties of a Pool. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_pool_exists_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def get_pool( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchPool: + """Gets information about the specified Pool. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. 
The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchPool. The BatchPool is compatible with MutableMapping + :rtype: ~client.models.BatchPool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchPool] = kwargs.pop("cls", None) + + _request = build_batch_get_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchPool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def update_pool( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + pool: _azure_batch_models4.BatchPoolUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Pool. 
+ + This only replaces the Pool properties specified in the request. For example, + if the Pool has a StartTask associated with it, and a request does not specify + a StartTask element, then the Pool keeps the existing StartTask. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param pool: The pool properties to update. Required. + :type pool: ~client.models.BatchPoolUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def disable_pool_auto_scale( # pylint: disable=inconsistent-return-statements + self, pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any + ) -> None: + """Disables automatic scaling for a Pool. + + Disables automatic scaling for a Pool. + + :param pool_id: The ID of the Pool on which to disable automatic scaling. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_disable_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + 
@distributed_trace + def enable_pool_auto_scale( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + content: _azure_batch_models4.BatchPoolEnableAutoScaleContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Enables automatic scaling for a Pool. + + You cannot enable automatic scaling on a Pool if a resize operation is in + progress on the Pool. If automatic scaling of the Pool is currently disabled, + you must specify a valid autoscale formula as part of the request. If automatic + scaling of the Pool is already enabled, you may specify a new autoscale formula + and/or a new evaluation interval. You cannot call this API for the same Pool + more than once every 30 seconds. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for enabling automatic scaling. Required. + :type content: ~client.models.BatchPoolEnableAutoScaleContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_enable_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + 
path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def evaluate_pool_auto_scale( + self, + pool_id: str, + content: _azure_batch_models4.BatchPoolEvaluateAutoScaleContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models4.AutoScaleRun: + """Gets the result of evaluating an automatic scaling formula on the Pool. + + This API is primarily for validating an autoscale formula, as it simply returns + the result without applying the formula to the Pool. The Pool must have auto + scaling enabled in order to evaluate a formula. + + :param pool_id: The ID of the Pool on which to evaluate the automatic scaling formula. + Required. 
+ :type pool_id: str + :param content: The options to use for evaluating the automatic scaling formula. Required. + :type content: ~client.models.BatchPoolEvaluateAutoScaleContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: AutoScaleRun. The AutoScaleRun is compatible with MutableMapping + :rtype: ~client.models.AutoScaleRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models4.AutoScaleRun] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_evaluate_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.AutoScaleRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def resize_pool( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + content: _azure_batch_models4.BatchPoolResizeContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Changes the number of Compute Nodes that are assigned to a Pool. 
+ + You can only resize a Pool when its allocation state is steady. If the Pool is + already resizing, the request fails with status code 409. When you resize a + Pool, the Pool's allocation state changes from steady to resizing. You cannot + resize Pools which are configured for automatic scaling. If you try to do this, + the Batch service returns an error 409. If you resize a Pool downwards, the + Batch service chooses which Compute Nodes to remove. To remove specific Compute + Nodes, use the Pool remove Compute Nodes API instead. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for resizing the pool. Required. + :type content: ~client.models.BatchPoolResizeContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. 
+ :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_resize_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def stop_pool_resize( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Stops an ongoing resize operation on the Pool. + + This does not restore the Pool to its previous state before the resize + operation: it only stops any further changes being made, and the Pool maintains + its current state. After stopping, the Pool stabilizes at the number of Compute + Nodes it was at when the stop operation was done. During the stop operation, + the Pool allocation state changes first to stopping and then to steady. A + resize operation need not be an explicit resize Pool request; this API can also + be used to halt the initial sizing of the Pool when it is created. + + :param pool_id: The ID of the Pool to get. Required. 
+ :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_stop_pool_resize_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", 
response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def replace_pool_properties( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + pool: _azure_batch_models4.BatchPoolReplaceContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Pool. + + This fully replaces all the updatable properties of the Pool. For example, if + the Pool has a StartTask associated with it and if StartTask is not specified + with this request, then the Batch service will remove the existing StartTask. + + :param pool_id: The ID of the Pool to update. Required. + :type pool_id: str + :param pool: The options to use for replacing properties on the pool. Required. + :type pool: ~client.models.BatchPoolReplaceContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_pool_properties_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + 
response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def remove_nodes( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + content: _azure_batch_models4.BatchNodeRemoveContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Removes Compute Nodes from the specified Pool. + + This operation can only run when the allocation state of the Pool is steady. + When this operation runs, the allocation state changes from steady to resizing. + Each request may remove up to 100 nodes. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for removing the node. Required. + :type content: ~client.models.BatchNodeRemoveContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_remove_nodes_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + 
path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_supported_images( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchSupportedImage"]: + """Lists all Virtual Machine Images supported by the Azure Batch service. + + Lists all Virtual Machine Images supported by the Azure Batch service. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images + `_. + Default value is None. + :paramtype filter: str + :return: An iterator like instance of BatchSupportedImage + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchSupportedImage] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchSupportedImage]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_supported_images_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: 
    @distributed_trace
    def list_pool_node_counts(
        self,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        max_results: Optional[int] = None,
        filter: Optional[str] = None,
        **kwargs: Any
    ) -> Iterable["_azure_batch_models4.BatchPoolNodeCounts"]:
        """Gets the number of Compute Nodes in each state, grouped by Pool. Note that the
        numbers returned may not always be up to date. If you need exact node counts,
        use a list query.

        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
         results can be returned. Default value is None.
        :paramtype max_results: int
        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch`_.
         Default value is None.
        :paramtype filter: str
        :return: An iterator like instance of BatchPoolNodeCounts
        :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchPoolNodeCounts]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_azure_batch_models4.BatchPoolNodeCounts]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the parameterized list request against the client endpoint.
            if not next_link:

                _request = build_batch_list_pool_node_counts_request(
                    timeout=timeout,
                    ocpdate=ocpdate,
                    max_results=max_results,
                    filter=filter,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            # Pull one page: returns (continuation token or None, iterator over the page's items).
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(List[_azure_batch_models4.BatchPoolNodeCounts], deserialized.get("value", []))
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("odata.nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        # Lazily-evaluated pager: HTTP requests are only issued as pages are consumed.
        return ItemPaged(get_next, extract_data)
self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchPoolNodeCounts], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def delete_job( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: 
Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Job. + + Deleting a Job also deletes all Tasks that are part of that Job, and all Job + statistics. This also overrides the retention period for Task data; that is, if + the Job contains Tasks which are still retained on Compute Nodes, the Batch + services deletes those Tasks' working directories and all their contents. When + a Delete Job request is received, the Batch service sets the Job to the + deleting state. All update operations on a Job that is in deleting state will + fail with status code 409 (Conflict), with additional information indicating + that the Job is being deleted. + + :param job_id: The ID of the Job to delete. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will delete the Job even if the corresponding nodes have + not fully processed the deletion. The default value is false. Default value is None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
    @distributed_trace
    def get_job(
        self,
        job_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        select: Optional[List[str]] = None,
        expand: Optional[List[str]] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> _azure_batch_models4.BatchJob:
        """Gets information about the specified Job.

        Gets information about the specified Job.

        :param job_id: The ID of the Job. Required.
        :type job_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :keyword expand: An OData $expand clause. Default value is None.
        :paramtype expand: list[str]
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: BatchJob. The BatchJob is compatible with MutableMapping
        :rtype: ~client.models.BatchJob
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status -> exception mapping; 412 is added per the requested etag precondition.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_azure_batch_models4.BatchJob] = kwargs.pop("cls", None)

        _request = build_batch_get_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            select=select,
            expand=expand,
            etag=etag,
            match_condition=match_condition,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Callers may pass stream=True to receive the raw body as an iterator of bytes.
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        # Streaming callers get raw bytes; otherwise deserialize the JSON into the model.
        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_azure_batch_models4.BatchJob, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace
    def update_job(  # pylint: disable=inconsistent-return-statements
        self,
        job_id: str,
        job: _azure_batch_models4.BatchJobUpdateContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Updates the properties of the specified Job.

        This replaces only the Job properties specified in the request. For example, if
        the Job has constraints, and a request does not specify the constraints
        element, then the Job keeps the existing constraints.

        :param job_id: The ID of the Job whose properties you want to update. Required.
        :type job_id: str
        :param job: The options to use for updating the Job. Required.
        :type job: ~client.models.BatchJobUpdateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status -> exception mapping; 412 is added per the requested etag precondition.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive view so a caller-supplied "Content-Type" is honored below.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the PATCH body; read-only model fields are omitted.
        _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_update_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def replace_job( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + job: _azure_batch_models4.BatchJob, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job. + + This fully replaces all the updatable properties of the Job. For example, if + the Job has constraints associated with it and if constraints is not specified + with this request, then the Batch service will remove the existing constraints. + + :param job_id: The ID of the Job whose properties you want to update. Required. + :type job_id: str + :param job: A job with updated properties. Required. + :type job: ~client.models.BatchJob + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
    @distributed_trace
    def disable_job(  # pylint: disable=inconsistent-return-statements
        self,
        job_id: str,
        content: _azure_batch_models4.BatchJobDisableContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Disables the specified Job, preventing new Tasks from running.

        The Batch Service immediately moves the Job to the disabling state. Batch then
        uses the disableTasks parameter to determine what to do with the currently
        running Tasks of the Job. The Job remains in the disabling state until the
        disable operation is completed and all Tasks have been dealt with according to
        the disableTasks option; the Job then moves to the disabled state. No new Tasks
        are started under the Job until it moves back to active state. If you try to
        disable a Job that is in any state other than active, disabling, or disabled,
        the request fails with status code 409.

        :param job_id: The ID of the Job to disable. Required.
        :type job_id: str
        :param content: The options to use for disabling the Job. Required.
        :type content: ~client.models.BatchJobDisableContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status -> exception mapping; 412 is added per the requested etag precondition.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive view so a caller-supplied "Content-Type" is honored below.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the request body; read-only model fields are omitted.
        _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_disable_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Disable is asynchronous on the service side: success is 202 Accepted.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def enable_job( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Enables the specified Job, allowing new Tasks to run. + + When you call this API, the Batch service sets a disabled Job to the enabling + state. After the this operation is completed, the Job moves to the active + state, and scheduling of new Tasks under the Job resumes. The Batch service + does not allow a Task to remain in the active state for more than 180 days. + Therefore, if you enable a Job containing active Tasks which were added more + than 180 days ago, those Tasks will not run. + + :param job_id: The ID of the Job to enable. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_job_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", 
    @distributed_trace
    def terminate_job(  # pylint: disable=inconsistent-return-statements
        self,
        job_id: str,
        parameters: Optional[_azure_batch_models4.BatchJobTerminateContent] = None,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        force: Optional[bool] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Terminates the specified Job, marking it as completed.

        When a Terminate Job request is received, the Batch service sets the Job to the
        terminating state. The Batch service then terminates any running Tasks
        associated with the Job and runs any required Job release Tasks. Then the Job
        moves into the completed state. If there are any Tasks in the Job in the active
        state, they will remain in the active state. Once a Job is terminated, new
        Tasks cannot be added and any remaining active Tasks will not be scheduled.

        :param job_id: The ID of the Job to terminate. Required.
        :type job_id: str
        :param parameters: The options to use for terminating the Job. Default value is None.
        :type parameters: ~client.models.BatchJobTerminateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be
         used instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to
         the current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service
         has been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the
         resource known to the client. The operation will be performed only if the resource on
         the service has not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword force: If true, the server will terminate the Job even if the corresponding nodes
         have not fully processed the termination. The default value is false. Default value is
         None.
        :paramtype force: bool
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value
         is None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # Map HTTP 412 (precondition failed) to the exception matching the
        # caller's requested etag match semantics.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Body is optional for this operation; only serialize when provided.
        if parameters is not None:
            _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
        else:
            _content = None

        _request = build_batch_terminate_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            force=force,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Service accepts the terminate request asynchronously with 202.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace
    def create_job(  # pylint: disable=inconsistent-return-statements
        self,
        job: _azure_batch_models4.BatchJobCreateContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Creates a Job to the specified Account.

        The Batch service supports two ways to control the work done as part of a Job.
        In the first approach, the user specifies a Job Manager Task. The Batch service
        launches this Task when it is ready to start the Job. The Job Manager Task
        controls all other Tasks that run under this Job, by using the Task APIs. In
        the second approach, the user directly controls the execution of Tasks under an
        active Job, by using the Task APIs. Also note: when naming Jobs, avoid
        including sensitive information such as user names or secret project names.
        This information may appear in telemetry logs accessible to Microsoft Support
        engineers.

        :param job: The Job to be created. Required.
        :type job: ~client.models.BatchJobCreateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be
         used instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to
         the current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_create_job_request(
            timeout=timeout,
            ocpdate=ocpdate,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # A successful create returns 201 Created; anything else is an error.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace
    def list_jobs(
        self,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        max_results: Optional[int] = None,
        filter: Optional[str] = None,
        select: Optional[List[str]] = None,
        expand: Optional[List[str]] = None,
        **kwargs: Any
    ) -> Iterable["_azure_batch_models4.BatchJob"]:
        """Lists all of the Jobs in the specified Account.

        Lists all of the Jobs in the specified Account.

        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be
         used instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to
         the current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword max_results: The maximum number of items to return in the response. A maximum of
         1000 applications can be returned. Default value is None.
        :paramtype max_results: int
        :keyword filter: An OData $filter clause. For more information on constructing this
         filter, see
         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs
         `_. Default value is None.
        :paramtype filter: str
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :keyword expand: An OData $expand clause. Default value is None.
        :paramtype expand: list[str]
        :return: An iterator like instance of BatchJob
        :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchJob]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_azure_batch_models4.BatchJob]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page uses the builder; subsequent pages follow the
            # service-provided continuation link with the client's api-version.
            if not next_link:

                _request = build_batch_list_jobs_request(
                    timeout=timeout,
                    ocpdate=ocpdate,
                    max_results=max_results,
                    filter=filter,
                    select=select,
                    expand=expand,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            # Deserialize one page: (continuation token, iterator of elements).
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(List[_azure_batch_models4.BatchJob], deserialized.get("value", []))
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("odata.nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return ItemPaged(get_next, extract_data)
    @distributed_trace
    def list_jobs_from_schedule(
        self,
        job_schedule_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        max_results: Optional[int] = None,
        filter: Optional[str] = None,
        select: Optional[List[str]] = None,
        expand: Optional[List[str]] = None,
        **kwargs: Any
    ) -> Iterable["_azure_batch_models4.BatchJob"]:
        """Lists the Jobs that have been created under the specified Job Schedule.

        Lists the Jobs that have been created under the specified Job Schedule.

        :param job_schedule_id: The ID of the Job Schedule from which you want to get a list of
         Jobs. Required.
        :type job_schedule_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be
         used instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to
         the current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword max_results: The maximum number of items to return in the response. A maximum of
         1000 applications can be returned. Default value is None.
        :paramtype max_results: int
        :keyword filter: An OData $filter clause. For more information on constructing this
         filter, see
         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule
         `_. Default value is None.
        :paramtype filter: str
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :keyword expand: An OData $expand clause. Default value is None.
        :paramtype expand: list[str]
        :return: An iterator like instance of BatchJob
        :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchJob]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_azure_batch_models4.BatchJob]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page via the request builder; later pages via the
            # continuation link, re-applying the client's api-version.
            if not next_link:

                _request = build_batch_list_jobs_from_schedule_request(
                    job_schedule_id=job_schedule_id,
                    timeout=timeout,
                    ocpdate=ocpdate,
                    max_results=max_results,
                    filter=filter,
                    select=select,
                    expand=expand,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(List[_azure_batch_models4.BatchJob], deserialized.get("value", []))
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("odata.nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return ItemPaged(get_next, extract_data)
    @distributed_trace
    def list_job_preparation_and_release_task_status(  # pylint: disable=name-too-long
        self,
        job_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        max_results: Optional[int] = None,
        filter: Optional[str] = None,
        select: Optional[List[str]] = None,
        **kwargs: Any
    ) -> Iterable["_azure_batch_models4.BatchJobPreparationAndReleaseTaskStatus"]:
        """Lists the execution status of the Job Preparation and Job Release Task for the
        specified Job across the Compute Nodes where the Job has run.

        This API returns the Job Preparation and Job Release Task status on all Compute
        Nodes that have run the Job Preparation or Job Release Task. This includes
        Compute Nodes which have since been removed from the Pool. If this API is
        invoked on a Job which has no Job Preparation or Job Release Task, the Batch
        service returns HTTP status code 409 (Conflict) with an error code of
        JobPreparationTaskNotSpecified.

        :param job_id: The ID of the Job. Required.
        :type job_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be
         used instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to
         the current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword max_results: The maximum number of items to return in the response. A maximum of
         1000 applications can be returned. Default value is None.
        :paramtype max_results: int
        :keyword filter: An OData $filter clause. For more information on constructing this
         filter, see
         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status
         `_. Default value is None.
        :paramtype filter: str
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :return: An iterator like instance of BatchJobPreparationAndReleaseTaskStatus
        :rtype:
         ~azure.core.paging.ItemPaged[~client.models.BatchJobPreparationAndReleaseTaskStatus]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_azure_batch_models4.BatchJobPreparationAndReleaseTaskStatus]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page via the builder; continuation pages re-use the
            # service-provided link with the client's api-version re-applied.
            if not next_link:

                _request = build_batch_list_job_preparation_and_release_task_status_request(
                    job_id=job_id,
                    timeout=timeout,
                    ocpdate=ocpdate,
                    max_results=max_results,
                    filter=filter,
                    select=select,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request
    @distributed_trace
    def get_job_task_counts(
        self, job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any
    ) -> _azure_batch_models4.BatchTaskCountsResult:
        """Gets the Task counts for the specified Job.

        Task counts provide a count of the Tasks by active, running or completed Task
        state, and a count of Tasks which succeeded or failed. Tasks in the preparing
        state are counted as running. Note that the numbers returned may not always be
        up to date. If you need exact task counts, use a list query.

        :param job_id: The ID of the Job. Required.
        :type job_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be
         used instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to
         the current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: BatchTaskCountsResult. The BatchTaskCountsResult is compatible with
         MutableMapping
        :rtype: ~client.models.BatchTaskCountsResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_azure_batch_models4.BatchTaskCountsResult] = kwargs.pop("cls", None)

        _request = build_batch_get_job_task_counts_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Caller may opt into streaming the raw response body.
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if _stream:
            # Streaming mode returns the raw byte iterator instead of a model.
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_azure_batch_models4.BatchTaskCountsResult, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace
    def create_certificate(  # pylint: disable=inconsistent-return-statements
        self,
        certificate: _azure_batch_models4.BatchCertificate,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Creates a Certificate to the specified Account.

        Creates a Certificate to the specified Account.

        :param certificate: The Certificate to be created. Required.
        :type certificate: ~client.models.BatchCertificate
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be
         used instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to
         the current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        _content = json.dumps(certificate, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_create_certificate_request(
            timeout=timeout,
            ocpdate=ocpdate,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # A successful create returns 201 Created; anything else is an error.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace
    def list_certificates(
        self,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        max_results: Optional[int] = None,
        filter: Optional[str] = None,
        select: Optional[List[str]] = None,
        **kwargs: Any
    ) -> Iterable["_azure_batch_models4.BatchCertificate"]:
        """Lists all of the Certificates that have been added to the specified Account.

        Lists all of the Certificates that have been added to the specified Account.

        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be
         used instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to
         the current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword max_results: The maximum number of items to return in the response. A maximum of
         1000 applications can be returned. Default value is None.
        :paramtype max_results: int
        :keyword filter: An OData $filter clause. For more information on constructing this
         filter, see
         `https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-certificates
         `_. Default value is None.
        :paramtype filter: str
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :return: An iterator like instance of BatchCertificate
        :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchCertificate]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_azure_batch_models4.BatchCertificate]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page via the builder; continuation pages follow the
            # service link with the client's api-version re-applied.
            if not next_link:

                _request = build_batch_list_certificates_request(
                    timeout=timeout,
                    ocpdate=ocpdate,
                    max_results=max_results,
                    filter=filter,
                    select=select,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(List[_azure_batch_models4.BatchCertificate], deserialized.get("value", []))
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("odata.nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return ItemPaged(get_next, extract_data)
_deserialize(List[_azure_batch_models4.BatchCertificate], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def cancel_certificate_deletion( # pylint: disable=inconsistent-return-statements + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Cancels a failed deletion of a Certificate from the specified Account. + + If you try to delete a Certificate that is being used by a Pool or Compute + Node, the status of the Certificate changes to deleteFailed. If you decide that + you want to continue using the Certificate, you can use this operation to set + the status of the Certificate back to active. If you intend to delete the + Certificate, you do not need to run this operation after the deletion failed. + You must make sure that the Certificate is not being used by any resources, and + then you can try again to delete the Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate being deleted. Required. 
+ :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_cancel_certificate_deletion_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = 
self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def delete_certificate( # pylint: disable=inconsistent-return-statements + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Deletes a Certificate from the specified Account. + + You cannot delete a Certificate if a resource (Pool or Compute Node) is using + it. Before you can delete a Certificate, you must therefore make sure that the + Certificate is not associated with any existing Pools, the Certificate is not + installed on any Nodes (even if you remove a Certificate from a Pool, it is not + removed from existing Compute Nodes in that Pool until they restart), and no + running Tasks depend on the Certificate. If you try to delete a Certificate + that is in use, the deletion fails. The Certificate status changes to + deleteFailed. You can use Cancel Delete Certificate to set the status back to + active if you decide that you want to continue using the Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate to be deleted. Required. + :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_certificate_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + 
response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_certificate( + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchCertificate: + """Gets information about the specified Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate to get. Required. + :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: BatchCertificate. 
The BatchCertificate is compatible with MutableMapping + :rtype: ~client.models.BatchCertificate + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchCertificate] = kwargs.pop("cls", None) + + _request = build_batch_get_certificate_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + 
response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchCertificate, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def job_schedule_exists( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Checks the specified Job Schedule exists. + + Checks the specified Job Schedule exists. + + :param job_schedule_id: The ID of the Job Schedule which you want to check. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. 
The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_job_schedule_exists_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not 
in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def delete_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Job Schedule from the specified Account. + + When you delete a Job Schedule, this also deletes all Jobs and Tasks under that + schedule. When Tasks are deleted, all the files in their working directories on + the Compute Nodes are also deleted (the retention period is ignored). The Job + Schedule statistics are no longer accessible once the Job Schedule is deleted, + though they are still counted towards Account lifetime statistics. + + :param job_schedule_id: The ID of the Job Schedule to delete. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will delete the JobSchedule even if the corresponding nodes + have not fully processed the deletion. The default value is false. Default value is None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + 
response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchJobSchedule: + """Gets information about the specified Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to get. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. 
Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchJobSchedule. The BatchJobSchedule is compatible with MutableMapping + :rtype: ~client.models.BatchJobSchedule + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchJobSchedule] = kwargs.pop("cls", None) + + _request = build_batch_get_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchJobSchedule, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def update_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + job_schedule: _azure_batch_models4.BatchJobScheduleUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job Schedule. + + This replaces only the Job Schedule properties specified in the request. 
For + example, if the schedule property is not specified with this request, then the + Batch service will keep the existing schedule. Changes to a Job Schedule only + impact Jobs created by the schedule after the update has taken place; currently + running Jobs are unaffected. + + :param job_schedule_id: The ID of the Job Schedule to update. Required. + :type job_schedule_id: str + :param job_schedule: The options to use for updating the Job Schedule. Required. + :type job_schedule: ~client.models.BatchJobScheduleUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def replace_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + job_schedule: _azure_batch_models4.BatchJobSchedule, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Job Schedule. + + This fully replaces all the updatable properties of the Job Schedule. For + example, if the schedule property is not specified with this request, then the + Batch service will remove the existing schedule. Changes to a Job Schedule only + impact Jobs created by the schedule after the update has taken place; currently + running Jobs are unaffected. + + :param job_schedule_id: The ID of the Job Schedule to update. Required. + :type job_schedule_id: str + :param job_schedule: A Job Schedule with updated properties. Required. + :type job_schedule: ~client.models.BatchJobSchedule + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. 
The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def disable_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Disables a Job Schedule. + + No new Jobs will be created until the Job Schedule is enabled again. + + :param job_schedule_id: The ID of the Job Schedule to disable. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_disable_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, 
+ ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def enable_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Enables a Job Schedule. + + Enables a Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to enable. Required. 
+ :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def terminate_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates a Job Schedule. + + Terminates a Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to terminates. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will terminate the JobSchedule even if the corresponding + nodes have not fully processed the termination. The default value is false. Default value is + None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_terminate_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def create_job_schedule( # pylint: disable=inconsistent-return-statements + self, + job_schedule: _azure_batch_models4.BatchJobScheduleCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Job Schedule to the specified Account. + + Creates a Job Schedule to the specified Account. + + :param job_schedule: The Job Schedule to be created. Required. + :type job_schedule: ~client.models.BatchJobScheduleCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". 
Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_job_schedule_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = 
self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_job_schedules( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchJobSchedule"]: + """Lists all of the Job Schedules in the specified Account. + + Lists all of the Job Schedules in the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules + `_. + Default value is None. 
+ :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchJobSchedule + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchJobSchedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchJobSchedule]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_job_schedules_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchJobSchedule], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def create_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task: _azure_batch_models4.BatchTaskCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Task to the specified Job. + + The maximum lifetime of a Task from addition to completion is 180 days. If a + Task has not completed within 180 days of being added it will be terminated by + the Batch service and left in whatever state it was in at that time. + + :param job_id: The ID of the Job to which the Task is to be created. Required. + :type job_id: str + :param task: The Task to be created. Required. + :type task: ~client.models.BatchTaskCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(task, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_task_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + 
response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_tasks( + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchTask"]: + """Lists all of the Tasks that are associated with the specified Job. + + For multi-instance Tasks, information such as affinityId, executionInfo and + nodeInfo refer to the primary Task. Use the list subtasks API to retrieve + information about subtasks. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. 
+ :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchTask + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchTask] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchTask]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_tasks_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchTask], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def create_task_collection( + self, + job_id: str, + task_collection: _azure_batch_models4.BatchTaskGroup, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchTaskAddCollectionResult: + """Adds a collection of Tasks to the specified Job. + + Note that each Task must have a unique ID. The Batch service may not return the + results for each Task in the same order the Tasks were submitted in this + request. If the server times out or the connection is closed during the + request, the request may have been partially or fully processed, or not at all. 
+ In such cases, the user should re-issue the request. Note that it is up to the + user to correctly handle failures when re-issuing a request. For example, you + should use the same Task IDs during a retry so that if the prior operation + succeeded, the retry will not create extra Tasks unexpectedly. If the response + contains any Tasks which failed to add, a client can retry the request. In a + retry, it is most efficient to resubmit only Tasks that failed to add, and to + omit Tasks that were successfully added on the first attempt. The maximum + lifetime of a Task from addition to completion is 180 days. If a Task has not + completed within 180 days of being added it will be terminated by the Batch + service and left in whatever state it was in at that time. + + :param job_id: The ID of the Job to which the Task collection is to be added. Required. + :type job_id: str + :param task_collection: The Tasks to be added. Required. + :type task_collection: ~client.models.BatchTaskGroup + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchTaskAddCollectionResult. 
The BatchTaskAddCollectionResult is compatible with + MutableMapping + :rtype: ~client.models.BatchTaskAddCollectionResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models4.BatchTaskAddCollectionResult] = kwargs.pop("cls", None) + + _content = json.dumps(task_collection, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_task_collection_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchTaskAddCollectionResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Task from the specified Job. + + When a Task is deleted, all of the files in its directory on the Compute Node + where it ran are also deleted (regardless of the retention time). For + multi-instance Tasks, the delete Task operation applies synchronously to the + primary task; subtasks and their files are then deleted asynchronously in the + background. + + :param job_id: The ID of the Job from which to delete the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to delete. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = 
self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchTask: + """Gets information about the specified Task. + + For multi-instance Tasks, information such as affinityId, executionInfo and + nodeInfo refer to the primary Task. Use the list subtasks API to retrieve + information about subtasks. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to get information about. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. 
The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchTask. The BatchTask is compatible with MutableMapping + :rtype: ~client.models.BatchTask + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchTask] = kwargs.pop("cls", None) + + _request = build_batch_get_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + 
_request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchTask, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def replace_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task_id: str, + task: _azure_batch_models4.BatchTask, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> 
None: + """Updates the properties of the specified Task. + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to update. Required. + :type task_id: str + :param task: The Task to update. Required. + :type task: ~client.models.BatchTask + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(task, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_sub_tasks( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchSubtask"]: + """Lists all of the subtasks that are associated with the specified multi-instance + Task. + + If the Task is not a multi-instance Task then this returns an empty collection. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :param task_id: The ID of the Task. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. 
+ :paramtype select: list[str] + :return: An iterator like instance of BatchSubtask + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchSubtask] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchSubtask]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_sub_tasks_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_azure_batch_models4.BatchSubtask], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def terminate_task( # pylint: disable=inconsistent-return-statements + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates the specified Task. + + When the Task has been terminated, it moves to the completed state. For + multi-instance Tasks, the terminate Task operation applies synchronously to the + primary task; subtasks are then terminated asynchronously in the background. + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to terminate. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_terminate_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", 
            response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore

    @distributed_trace
    def reactivate_task(  # pylint: disable=inconsistent-return-statements
        self,
        job_id: str,
        task_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Reactivates a Task, allowing it to run again even if its retry count has been
        exhausted.

        Reactivation makes a Task eligible to be retried again up to its maximum retry
        count. The Task's state is changed to active. As the Task is no longer in the
        completed state, any previous exit code or failure information is no longer
        available after reactivation. Each time a Task is reactivated, its retry count
        is reset to 0. Reactivation will fail for Tasks that are not completed or that
        previously completed successfully (with an exit code of 0). Additionally, it
        will fail if the Job has completed (or is terminating or deleting).

        :param job_id: The ID of the Job containing the Task. Required.
        :type job_id: str
        :param task_id: The ID of the Task to reactivate. Required.
        :type task_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # Map HTTP 412 (precondition failed) to the exception matching the caller's
        # etag match condition, so conditional-request failures raise a precise type.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_reactivate_task_request(
            job_id=job_id,
            task_id=task_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 204 is the only status this operation treats as success.
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace
    def delete_task_file(  # pylint: disable=inconsistent-return-statements
        self,
        job_id: str,
        task_id: str,
        file_path: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        recursive: Optional[bool] = None,
        **kwargs: Any
    ) -> None:
        """Deletes the specified Task file from the Compute Node where the Task ran.

        Deletes the specified Task file from the Compute Node where the Task ran.

        :param job_id: The ID of the Job that contains the Task. Required.
        :type job_id: str
        :param task_id: The ID of the Task whose file you want to delete. Required.
        :type task_id: str
        :param file_path: The path to the Task file that you want to delete. Required.
        :type file_path: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword recursive: Whether to delete children of a directory. If the filePath parameter
         represents a directory instead of a file, you can set recursive to true to delete the
         directory and all of the files and subdirectories in it. If recursive is false
         then the directory must be empty or deletion will fail. Default value is None.
        :paramtype recursive: bool
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_delete_task_file_request(
            job_id=job_id,
            task_id=task_id,
            file_path=file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            recursive=recursive,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 200 is the only status this operation treats as success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace
    def get_task_file(
        self,
        job_id: str,
        task_id: str,
        file_path: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        ocp_range: Optional[str] = None,
        **kwargs: Any
    ) -> Iterator[bytes]:
        """Returns the content of the specified Task file.

        :param job_id: The ID of the Job that contains the Task. Required.
        :type job_id: str
        :param task_id: The ID of the Task whose file you want to retrieve. Required.
        :type task_id: str
        :param file_path: The path to the Task file that you want to get the content of. Required.
        :type file_path: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword ocp_range: The byte range to be retrieved. The default is to retrieve the entire file.
         The format is bytes=startRange-endRange. Default value is None.
        :paramtype ocp_range: str
        :return: Iterator[bytes]
        :rtype: Iterator[bytes]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None)

        _request = build_batch_get_task_file_request(
            job_id=job_id,
            task_id=task_id,
            file_path=file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            ocp_range=ocp_range,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # File content is streamed by default; callers may pass stream=False to buffer.
        _stream = kwargs.pop("stream", True)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["ocp-batch-file-isdirectory"] = self._deserialize(
            "bool", response.headers.get("ocp-batch-file-isdirectory")
        )
        response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode"))
        response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url"))
        response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
        response_headers["content-type"] = self._deserialize("str", response.headers.get("content-type"))

        deserialized = response.iter_bytes()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace
    def get_task_file_properties(
        self,
        job_id: str,
        task_id: str,
        file_path: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> bool:
        """Gets the properties of the specified Task file.

        :param job_id: The ID of the Job that contains the Task. Required.
        :type job_id: str
        :param task_id: The ID of the Task whose file you want to retrieve. Required.
        :type task_id: str
        :param file_path: The path to the Task file that you want to get the content of. Required.
        :type file_path: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :return: bool
        :rtype: bool
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_get_task_file_properties_request(
            job_id=job_id,
            task_id=task_id,
            file_path=file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["ocp-batch-file-isdirectory"] = self._deserialize(
            "bool", response.headers.get("ocp-batch-file-isdirectory")
        )
        response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode"))
        response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url"))
        response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
        # HEAD-style existence check: True for any 2xx (only 200 reaches here).
        return 200 <= response.status_code <= 299
    @distributed_trace
    def list_task_files(
        self,
        job_id: str,
        task_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        max_results: Optional[int] = None,
        filter: Optional[str] = None,
        recursive: Optional[bool] = None,
        **kwargs: Any
    ) -> Iterable["_azure_batch_models4.BatchNodeFile"]:
        """Lists the files in a Task's directory on its Compute Node.

        Lists the files in a Task's directory on its Compute Node.

        :param job_id: The ID of the Job that contains the Task. Required.
        :type job_id: str
        :param task_id: The ID of the Task whose files you want to list. Required.
        :type task_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
         files can be returned. Default value is None.
        :paramtype max_results: int
        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-task-files
         `_.
         Default value is None.
        :paramtype filter: str
        :keyword recursive: Whether to list children of the Task directory. This parameter can be used
         in combination with the filter parameter to list specific type of files. Default value is None.
        :paramtype recursive: bool
        :return: An iterator like instance of BatchNodeFile
        :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchNodeFile]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_azure_batch_models4.BatchNodeFile]] = kwargs.pop("cls", None)

        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the list request from the caller's arguments.
            if not next_link:

                _request = build_batch_list_task_files_request(
                    job_id=job_id,
                    task_id=task_id,
                    timeout=timeout,
                    ocpdate=ocpdate,
                    max_results=max_results,
                    filter=filter,
                    recursive=recursive,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        def extract_data(pipeline_response):
            # Pull one page's items and the continuation link out of the response body.
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(List[_azure_batch_models4.BatchNodeFile], deserialized.get("value", []))
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("odata.nextLink") or None, iter(list_of_elem)

        def get_next(next_link=None):
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return ItemPaged(get_next, extract_data)
    @distributed_trace
    def create_node_user(  # pylint: disable=inconsistent-return-statements
        self,
        pool_id: str,
        node_id: str,
        user: _azure_batch_models4.BatchNodeUserCreateContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Adds a user Account to the specified Compute Node.

        You can add a user Account to a Compute Node only when it is in the idle or
        running state.

        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
        :type pool_id: str
        :param node_id: The ID of the machine on which you want to create a user Account. Required.
        :type node_id: str
        :param user: The options to use for creating the user. Required.
        :type user: ~client.models.BatchNodeUserCreateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        _content = json.dumps(user, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_create_node_user_request(
            pool_id=pool_id,
            node_id=node_id,
            timeout=timeout,
            ocpdate=ocpdate,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 201 is the only status this operation treats as success.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace
    def delete_node_user(  # pylint: disable=inconsistent-return-statements
        self,
        pool_id: str,
        node_id: str,
        user_name: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Deletes a user Account from the specified Compute Node.

        You can delete a user Account from a Compute Node only when it is in the idle or
        running state.

        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
        :type pool_id: str
        :param node_id: The ID of the machine on which you want to delete a user Account. Required.
        :type node_id: str
        :param user_name: The name of the user Account to delete. Required.
        :type user_name: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_delete_node_user_request(
            pool_id=pool_id,
            node_id=node_id,
            user_name=user_name,
            timeout=timeout,
            ocpdate=ocpdate,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 200 is the only status this operation treats as success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace
    def replace_node_user(  # pylint: disable=inconsistent-return-statements
        self,
        pool_id: str,
        node_id: str,
        user_name: str,
        content: _azure_batch_models4.BatchNodeUserUpdateContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Updates the password and expiration time of a user Account on the specified Compute Node.

        This operation replaces of all the updatable properties of the Account. For
        example, if the expiryTime element is not specified, the current value is
        replaced with the default value, not left unmodified. You can update a user
        Account on a Compute Node only when it is in the idle or running state.

        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
        :type pool_id: str
        :param node_id: The ID of the machine on which you want to update a user Account. Required.
        :type node_id: str
        :param user_name: The name of the user Account to update. Required.
        :type user_name: str
        :param content: The options to use for updating the user. Required.
        :type content: ~client.models.BatchNodeUserUpdateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_replace_node_user_request(
            pool_id=pool_id,
            node_id=node_id,
            user_name=user_name,
            timeout=timeout,
            ocpdate=ocpdate,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 200 is the only status this operation treats as success.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace
    def get_node(
        self,
        pool_id: str,
        node_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        select: Optional[List[str]] = None,
        **kwargs: Any
    ) -> _azure_batch_models4.BatchNode:
        """Gets information about the specified Compute Node.

        Gets information about the specified Compute Node.

        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
        :type pool_id: str
        :param node_id: The ID of the Compute Node that you want to get information about. Required.
        :type node_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :return: BatchNode. The BatchNode is compatible with MutableMapping
        :rtype: ~client.models.BatchNode
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_azure_batch_models4.BatchNode] = kwargs.pop("cls", None)

        _request = build_batch_get_node_request(
            pool_id=pool_id,
            node_id=node_id,
            timeout=timeout,
            ocpdate=ocpdate,
            select=select,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Buffered by default; callers may pass stream=True to get raw bytes back.
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_azure_batch_models4.BatchNode, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchNode, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def reboot_node( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models4.BatchNodeRebootContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Restarts the specified Compute Node. + + You can restart a Compute Node only if it is in an idle or running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for rebooting the Compute Node. Default value is None. + :type parameters: ~client.models.BatchNodeRebootContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_reboot_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def start_node( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Starts the specified Compute Node. + + You can start a Compute Node only if it has been deallocated. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_start_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + 
@distributed_trace + def reimage_node( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models4.BatchNodeReimageContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Reinstalls the operating system on the specified Compute Node. + + You can reinstall the operating system on a Compute Node only if it is in an + idle or running state. This API can be invoked only on Pools created with the + cloud service configuration property. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for reimaging the Compute Node. Default value is None. + :type parameters: ~client.models.BatchNodeReimageContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_reimage_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def deallocate_node( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models4.BatchNodeDeallocateContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Deallocates the specified Compute Node. + + You can deallocate a Compute Node only if it is in an idle or running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for deallocating the Compute Node. Default value is None. + :type parameters: ~client.models.BatchNodeDeallocateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_deallocate_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = 
self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def disable_node_scheduling( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models4.BatchNodeDisableSchedulingContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Disables Task scheduling on the specified Compute Node. + + You can disable Task scheduling on a Compute Node only if its current + scheduling state is enabled. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node on which you want to disable Task scheduling. + Required. + :type node_id: str + :param parameters: The options to use for disabling scheduling on the Compute Node. Default + value is None. + :type parameters: ~client.models.BatchNodeDisableSchedulingContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_disable_node_scheduling_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] 
= self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def enable_node_scheduling( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Enables Task scheduling on the specified Compute Node. + + You can enable Task scheduling on a Compute Node only if its current scheduling + state is disabled. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node on which you want to enable Task scheduling. + Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_node_scheduling_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: 
ignore + + @distributed_trace + def get_node_remote_login_settings( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchNodeRemoteLoginSettings: + """Gets the settings required for remote login to a Compute Node. + + Before you can remotely login to a Compute Node using the remote login settings, + you must create a user Account on the Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node for which to obtain the remote login settings. + Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchNodeRemoteLoginSettings. 
The BatchNodeRemoteLoginSettings is compatible with + MutableMapping + :rtype: ~client.models.BatchNodeRemoteLoginSettings + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchNodeRemoteLoginSettings] = kwargs.pop("cls", None) + + _request = build_batch_get_node_remote_login_settings_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + 
response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchNodeRemoteLoginSettings, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def upload_node_logs( + self, + pool_id: str, + node_id: str, + content: _azure_batch_models4.UploadBatchServiceLogsContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models4.UploadBatchServiceLogsResult: + """Upload Azure Batch service log files from the specified Compute Node to Azure + Blob Storage. + + This is for gathering Azure Batch service log files in an automated fashion + from Compute Nodes if you are experiencing an error and wish to escalate to + Azure support. The Azure Batch service log files should be shared with Azure + support to aid in debugging issues with the Batch service. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node for which you want to get the Remote Desktop + Protocol file. Required. + :type node_id: str + :param content: The Azure Batch service log files upload options. Required. + :type content: ~client.models.UploadBatchServiceLogsContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: UploadBatchServiceLogsResult. The UploadBatchServiceLogsResult is compatible with + MutableMapping + :rtype: ~client.models.UploadBatchServiceLogsResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models4.UploadBatchServiceLogsResult] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_upload_node_logs_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise 
HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.UploadBatchServiceLogsResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_nodes( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchNode"]: + """Lists the Compute Nodes in the specified Pool. + + Lists the Compute Nodes in the specified Pool. + + :param pool_id: The ID of the Pool from which you want to list Compute Nodes. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. 
+ :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchNode + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchNode] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchNode]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_nodes_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + 
"endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchNode], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_node_extension( + self, + pool_id: str, + node_id: str, + extension_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models4.BatchNodeVMExtension: + """Gets information about the specified Compute Node Extension. + + Gets information about the specified Compute Node Extension. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that contains the extensions. Required. + :type node_id: str + :param extension_name: The name of the Compute Node Extension that you want to get information + about. Required. 
+ :type extension_name: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: BatchNodeVMExtension. The BatchNodeVMExtension is compatible with MutableMapping + :rtype: ~client.models.BatchNodeVMExtension + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models4.BatchNodeVMExtension] = kwargs.pop("cls", None) + + _request = build_batch_get_node_extension_request( + pool_id=pool_id, + node_id=node_id, + extension_name=extension_name, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + 
response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models4.BatchNodeVMExtension, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_node_extensions( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchNodeVMExtension"]: + """Lists the Compute Nodes Extensions in the specified Pool. + + Lists the Compute Nodes Extensions in the specified Pool. + + :param pool_id: The ID of the Pool that contains Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to list extensions. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchNodeVMExtension + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchNodeVMExtension] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchNodeVMExtension]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_node_extensions_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchNodeVMExtension], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def delete_node_file( # pylint: disable=inconsistent-return-statements + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> None: + """Deletes the specified file from the Compute Node. + + Deletes the specified file from the Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. 
+ :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword recursive: Whether to delete children of a directory. If the filePath parameter + represents + a directory instead of a file, you can set recursive to true to delete the + directory and all of the files and subdirectories in it. If recursive is false + then the directory must be empty or deletion will fail. Default value is None. + :paramtype recursive: bool + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_node_file_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def get_node_file( + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + ocp_range: Optional[str] = None, + **kwargs: Any + ) -> Iterator[bytes]: + """Returns the content of the specified Compute Node file. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. + :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword ocp_range: The byte range to be retrieved. The default is to retrieve the entire file. + The + format is bytes=startRange-endRange. Default value is None. + :paramtype ocp_range: str + :return: Iterator[bytes] + :rtype: Iterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_batch_get_node_file_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + ocp_range=ocp_range, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", True) + pipeline_response: PipelineResponse = self._client._pipeline.run( # 
pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + response_headers["content-type"] = self._deserialize("str", response.headers.get("content-type")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_node_file_properties( + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = 
None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> bool: + """Gets the properties of the specified Compute Node file. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. + :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_get_node_file_properties_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = 
self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def list_node_files( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> Iterable["_azure_batch_models4.BatchNodeFile"]: + """Lists all of the files in Task directories on the specified Compute Node. + + Lists all of the files in Task directories on the specified Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node whose files you want to list. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. 
Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files + `_. + Default value is None. + :paramtype filter: str + :keyword recursive: Whether to list children of a directory. Default value is None. + :paramtype recursive: bool + :return: An iterator like instance of BatchNodeFile + :rtype: ~azure.core.paging.ItemPaged[~client.models.BatchNodeFile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models4.BatchNodeFile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_node_files_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, 
_parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models4.BatchNodeFile], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return ItemPaged(get_next, extract_data) diff --git a/sdk/batch/azure-batch/client/_operations/_patch.py b/sdk/batch/azure-batch/client/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/batch/azure-batch/client/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/batch/azure-batch/client/_patch.py b/sdk/batch/azure-batch/client/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/batch/azure-batch/client/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/batch/azure-batch/client/_serialization.py b/sdk/batch/azure-batch/client/_serialization.py new file mode 100644 index 000000000000..eb86ea23c965 --- /dev/null +++ b/sdk/batch/azure-batch/client/_serialization.py @@ -0,0 +1,2032 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... 
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... 
+ # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? 
None: + self.additional_properties: Optional[Dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. 
+ + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. 
Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, 
target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." 
+ raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. 
+ + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. + :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + 
el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. 
+ + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. 
+ + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." 
+ raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an 
attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. 
+ + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. 
+ :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
+ if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. 
+ :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. + """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. 
+ :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. 
+ + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. 
+ :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. 
+ :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. 
+ """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/batch/azure-batch/client/_vendor.py b/sdk/batch/azure-batch/client/_vendor.py new file mode 100644 index 000000000000..396a0128421a --- /dev/null +++ b/sdk/batch/azure-batch/client/_vendor.py @@ -0,0 +1,57 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import Optional, TYPE_CHECKING + +from azure.core import MatchConditions + +from ._configuration import BatchClientConfiguration + +if TYPE_CHECKING: + from azure.core import PipelineClient + + from ._serialization import Deserializer, Serializer + + +class BatchClientMixinABC(ABC): + """DO NOT use this class. It is for internal typing use only.""" + + _client: "PipelineClient" + _config: BatchClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" + + +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None diff --git a/sdk/batch/azure-batch/client/_version.py b/sdk/batch/azure-batch/client/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/batch/azure-batch/client/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/batch/azure-batch/client/aio/__init__.py b/sdk/batch/azure-batch/client/aio/__init__.py new file mode 100644 index 000000000000..b1e0d367b042 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import BatchClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "BatchClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/batch/azure-batch/client/aio/_client.py b/sdk/batch/azure-batch/client/aio/_client.py new file mode 100644 index 000000000000..6a2133679d27 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_client.py @@ -0,0 +1,102 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._serialization import Deserializer, Serializer +from ._configuration import BatchClientConfiguration +from ._operations import BatchClientOperationsMixin + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class BatchClient(BatchClientOperationsMixin): + """BatchClient. + + :param endpoint: Batch account endpoint (for example: + `https://batchaccount.eastus2.batch.azure.com + `_). Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-07-01.20.0". Note that overriding this default value may result in unsupported behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "{endpoint}" + self._config = BatchClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + kwargs["request_id_header_name"] = "client-request-id" + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/batch/azure-batch/client/aio/_configuration.py b/sdk/batch/azure-batch/client/aio/_configuration.py new file mode 100644 index 000000000000..296a4e20cbe0 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_configuration.py @@ -0,0 +1,65 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class BatchClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for BatchClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Batch account endpoint (for example: + `https://batchaccount.eastus2.batch.azure.com + `_). Required. 
+ :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-07-01.20.0". Note that overriding this default value may result in unsupported behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-07-01.20.0") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://batch.core.windows.net//.default"]) + kwargs.setdefault("sdk_moniker", "batch/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not 
self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/batch/azure-batch/client/aio/_operations/__init__.py b/sdk/batch/azure-batch/client/aio/_operations/__init__.py new file mode 100644 index 000000000000..ea39f177e86d --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_operations/__init__.py @@ -0,0 +1,25 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import BatchClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "BatchClientOperationsMixin", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/batch/azure-batch/client/aio/_operations/_operations.py b/sdk/batch/azure-batch/client/aio/_operations/_operations.py new file mode 100644 index 000000000000..903c36574077 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_operations/_operations.py @@ -0,0 +1,8308 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import datetime +import json +import sys +from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, List, Optional, TypeVar +import urllib.parse + +from azure.core import MatchConditions +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceModifiedError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ....azure.batch import models as _azure_batch_models5 +from ..._model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from ..._operations._operations import ( + build_batch_cancel_certificate_deletion_request, + build_batch_create_certificate_request, + build_batch_create_job_request, + build_batch_create_job_schedule_request, + build_batch_create_node_user_request, + build_batch_create_pool_request, + build_batch_create_task_collection_request, + build_batch_create_task_request, + build_batch_deallocate_node_request, + build_batch_delete_certificate_request, + build_batch_delete_job_request, + build_batch_delete_job_schedule_request, + build_batch_delete_node_file_request, + build_batch_delete_node_user_request, + build_batch_delete_pool_request, + build_batch_delete_task_file_request, + build_batch_delete_task_request, + build_batch_disable_job_request, + 
build_batch_disable_job_schedule_request, + build_batch_disable_node_scheduling_request, + build_batch_disable_pool_auto_scale_request, + build_batch_enable_job_request, + build_batch_enable_job_schedule_request, + build_batch_enable_node_scheduling_request, + build_batch_enable_pool_auto_scale_request, + build_batch_evaluate_pool_auto_scale_request, + build_batch_get_application_request, + build_batch_get_certificate_request, + build_batch_get_job_request, + build_batch_get_job_schedule_request, + build_batch_get_job_task_counts_request, + build_batch_get_node_extension_request, + build_batch_get_node_file_properties_request, + build_batch_get_node_file_request, + build_batch_get_node_remote_login_settings_request, + build_batch_get_node_request, + build_batch_get_pool_request, + build_batch_get_task_file_properties_request, + build_batch_get_task_file_request, + build_batch_get_task_request, + build_batch_job_schedule_exists_request, + build_batch_list_applications_request, + build_batch_list_certificates_request, + build_batch_list_job_preparation_and_release_task_status_request, + build_batch_list_job_schedules_request, + build_batch_list_jobs_from_schedule_request, + build_batch_list_jobs_request, + build_batch_list_node_extensions_request, + build_batch_list_node_files_request, + build_batch_list_nodes_request, + build_batch_list_pool_node_counts_request, + build_batch_list_pool_usage_metrics_request, + build_batch_list_pools_request, + build_batch_list_sub_tasks_request, + build_batch_list_supported_images_request, + build_batch_list_task_files_request, + build_batch_list_tasks_request, + build_batch_pool_exists_request, + build_batch_reactivate_task_request, + build_batch_reboot_node_request, + build_batch_reimage_node_request, + build_batch_remove_nodes_request, + build_batch_replace_job_request, + build_batch_replace_job_schedule_request, + build_batch_replace_node_user_request, + build_batch_replace_pool_properties_request, + 
build_batch_replace_task_request, + build_batch_resize_pool_request, + build_batch_start_node_request, + build_batch_stop_pool_resize_request, + build_batch_terminate_job_request, + build_batch_terminate_job_schedule_request, + build_batch_terminate_task_request, + build_batch_update_job_request, + build_batch_update_job_schedule_request, + build_batch_update_pool_request, + build_batch_upload_node_logs_request, +) +from .._vendor import BatchClientMixinABC + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class BatchClientOperationsMixin(BatchClientMixinABC): # pylint: disable=too-many-public-methods + + @distributed_trace + def list_applications( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchApplication"]: + """Lists all of the applications available in the specified Account. + + This operation returns only Applications and versions that are available for + use on Compute Nodes; that is, that can be used in an Package reference. For + administrator information about applications and versions that are not yet + available to Compute Nodes, use the Azure portal or the Azure Resource Manager + API. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :return: An iterator like instance of BatchApplication + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchApplication] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchApplication]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_applications_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchApplication], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_application( + self, + application_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchApplication: + """Gets information about the specified Application. + + This operation returns only Applications and versions that are available for + use on Compute Nodes; that is, that can be used in an Package reference. For + administrator information about Applications and versions that are not yet + available to Compute Nodes, use the Azure portal or the Azure Resource Manager + API. + + :param application_id: The ID of the Application. Required. + :type application_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchApplication. The BatchApplication is compatible with MutableMapping + :rtype: ~client.models.BatchApplication + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchApplication] = kwargs.pop("cls", None) + + _request = build_batch_get_application_request( + application_id=application_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchApplication, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_pool_usage_metrics( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + starttime: Optional[datetime.datetime] = None, + endtime: Optional[datetime.datetime] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchPoolUsageMetrics"]: + """Lists the usage metrics, aggregated by Pool across individual time intervals, + for the specified Account. + + If you do not specify a $filter clause including a poolId, the response + includes all Pools that existed in the Account in the time range of the + returned aggregation intervals. If you do not specify a $filter clause + including a startTime or endTime these filters default to the start and end + times of the last aggregation interval currently available; that is, only the + last aggregation interval is returned. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword starttime: The earliest time from which to include metrics. This must be at least two + and + a half hours before the current time. If not specified this defaults to the + start time of the last aggregation interval currently available. Default value is None. + :paramtype starttime: ~datetime.datetime + :keyword endtime: The latest time from which to include metrics. This must be at least two + hours + before the current time. If not specified this defaults to the end time of the + last aggregation interval currently available. Default value is None. + :paramtype endtime: ~datetime.datetime + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-account-usage-metrics + `_. + Default value is None. 
+ :paramtype filter: str + :return: An iterator like instance of BatchPoolUsageMetrics + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchPoolUsageMetrics] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchPoolUsageMetrics]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pool_usage_metrics_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + starttime=starttime, + endtime=endtime, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = 
pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchPoolUsageMetrics], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def create_pool( + self, + pool: _azure_batch_models5.BatchPoolCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Pool to the specified Account. + + When naming Pools, avoid including sensitive information such as user names or + secret project names. This information may appear in telemetry logs accessible + to Microsoft Support engineers. + + :param pool: The Pool to be created. Required. + :type pool: ~client.models.BatchPoolCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_pool_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + 
response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_pools( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchPool"]: + """Lists all of the Pools which be mounted. + + Lists all of the Pools which be mounted. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-pools + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. 
+ :paramtype expand: list[str] + :return: An iterator like instance of BatchPool + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchPool] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchPool]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pools_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_azure_batch_models5.BatchPool], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def delete_pool( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Pool from the specified Account. + + When you request that a Pool be deleted, the following actions occur: the Pool + state is set to deleting; any ongoing resize operation on the Pool are stopped; + the Batch service starts resizing the Pool to zero Compute Nodes; any Tasks + running on existing Compute Nodes are terminated and requeued (as if a resize + Pool operation had been requested with the default requeue option); finally, + the Pool is removed from the system. Because running Tasks are requeued, the + user can rerun these Tasks by updating their Job to target a different Pool. + The Tasks can then run on the new Pool. 
If you want to override the requeue + behavior, then you should call resize Pool explicitly to shrink the Pool to + zero size before deleting the Pool. If you call an Update, Patch or Delete API + on a Pool in the deleting state, it will fail with HTTP status code 409 with + error code PoolBeingDeleted. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = 
self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def pool_exists( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> bool: + """Gets basic properties of a Pool. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_pool_exists_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 404]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + if response.status_code == 200: + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace_async + async def get_pool( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchPool: + """Gets information about the specified Pool. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. 
The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchPool. The BatchPool is compatible with MutableMapping + :rtype: ~client.models.BatchPool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchPool] = kwargs.pop("cls", None) + + _request = build_batch_get_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchPool, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def update_pool( + self, + pool_id: str, + pool: _azure_batch_models5.BatchPoolUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Pool. + + This only replaces the Pool properties specified in the request. 
For example, + if the Pool has a StartTask associated with it, and a request does not specify + a StartTask element, then the Pool keeps the existing StartTask. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param pool: The pool properties to update. Required. + :type pool: ~client.models.BatchPoolUpdateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_update_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def disable_pool_auto_scale( + self, pool_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any + ) -> None: + """Disables automatic scaling for a Pool. + + Disables automatic scaling for a Pool. + + :param pool_id: The ID of the Pool on which to disable automatic scaling. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_disable_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # 
type: ignore + + @distributed_trace_async + async def enable_pool_auto_scale( + self, + pool_id: str, + content: _azure_batch_models5.BatchPoolEnableAutoScaleContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Enables automatic scaling for a Pool. + + You cannot enable automatic scaling on a Pool if a resize operation is in + progress on the Pool. If automatic scaling of the Pool is currently disabled, + you must specify a valid autoscale formula as part of the request. If automatic + scaling of the Pool is already enabled, you may specify a new autoscale formula + and/or a new evaluation interval. You cannot call this API for the same Pool + more than once every 30 seconds. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for enabling automatic scaling. Required. + :type content: ~client.models.BatchPoolEnableAutoScaleContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_enable_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + 
path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def evaluate_pool_auto_scale( + self, + pool_id: str, + content: _azure_batch_models5.BatchPoolEvaluateAutoScaleContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models5.AutoScaleRun: + """Gets the result of evaluating an automatic scaling formula on the Pool. + + This API is primarily for validating an autoscale formula, as it simply returns + the result without applying the formula to the Pool. The Pool must have auto + scaling enabled in order to evaluate a formula. 
+ + :param pool_id: The ID of the Pool on which to evaluate the automatic scaling formula. + Required. + :type pool_id: str + :param content: The options to use for evaluating the automatic scaling formula. Required. + :type content: ~client.models.BatchPoolEvaluateAutoScaleContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: AutoScaleRun. The AutoScaleRun is compatible with MutableMapping + :rtype: ~client.models.AutoScaleRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models5.AutoScaleRun] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_evaluate_pool_auto_scale_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.AutoScaleRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def resize_pool( + self, + pool_id: str, + content: _azure_batch_models5.BatchPoolResizeContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Changes the 
number of Compute Nodes that are assigned to a Pool. + + You can only resize a Pool when its allocation state is steady. If the Pool is + already resizing, the request fails with status code 409. When you resize a + Pool, the Pool's allocation state changes from steady to resizing. You cannot + resize Pools which are configured for automatic scaling. If you try to do this, + the Batch service returns an error 409. If you resize a Pool downwards, the + Batch service chooses which Compute Nodes to remove. To remove specific Compute + Nodes, use the Pool remove Compute Nodes API instead. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for resizing the pool. Required. + :type content: ~client.models.BatchPoolResizeContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. 
Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_resize_pool_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def stop_pool_resize( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Stops an ongoing resize operation on the Pool. + + This does not restore the Pool to its previous state before the resize + operation: it only stops any further changes being made, and the Pool maintains + its current state. After stopping, the Pool stabilizes at the number of Compute + Nodes it was at when the stop operation was done. During the stop operation, + the Pool allocation state changes first to stopping and then to steady. A + resize operation need not be an explicit resize Pool request; this API can also + be used to halt the initial sizing of the Pool when it is created. + + :param pool_id: The ID of the Pool to get. Required. 
+ :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead. Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_stop_pool_resize_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def replace_pool_properties( + self, + pool_id: str, + pool: _azure_batch_models5.BatchPoolReplaceContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Pool. + + This fully replaces all the updatable properties of the Pool. For example, if + the Pool has a StartTask associated with it and if StartTask is not specified + with this request, then the Batch service will remove the existing StartTask. + + :param pool_id: The ID of the Pool to update. Required. + :type pool_id: str + :param pool: The options to use for replacing properties on the pool. Required. + :type pool: ~client.models.BatchPoolReplaceContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(pool, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_pool_properties_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", 
response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def remove_nodes( + self, + pool_id: str, + content: _azure_batch_models5.BatchNodeRemoveContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Removes Compute Nodes from the specified Pool. + + This operation can only run when the allocation state of the Pool is steady. + When this operation runs, the allocation state changes from steady to resizing. + Each request may remove up to 100 nodes. + + :param pool_id: The ID of the Pool to get. Required. + :type pool_id: str + :param content: The options to use for removing the node. Required. + :type content: ~client.models.BatchNodeRemoveContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_remove_nodes_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + 
path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_supported_images( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchSupportedImage"]: + """Lists all Virtual Machine Images supported by the Azure Batch service. + + Lists all Virtual Machine Images supported by the Azure Batch service. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images + `_. + Default value is None. + :paramtype filter: str + :return: An iterator like instance of BatchSupportedImage + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchSupportedImage] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchSupportedImage]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_supported_images_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: 
[urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchSupportedImage], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_pool_node_counts( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchPoolNodeCounts"]: + """Gets the number of Compute Nodes in each state, grouped by Pool. Note that the + numbers returned may not always be up to date. 
If you need exact node counts, + use a list query. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-support-images + `_. + Default value is None. + :paramtype filter: str + :return: An iterator like instance of BatchPoolNodeCounts + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchPoolNodeCounts] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchPoolNodeCounts]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_pool_node_counts_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", 
skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchPoolNodeCounts], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def delete_job( + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: 
    @distributed_trace_async
    async def delete_job(
        self,
        job_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        force: Optional[bool] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Deletes a Job.

        Deleting a Job also deletes all Tasks that are part of that Job, and all Job
        statistics. This also overrides the retention period for Task data; that is, if
        the Job contains Tasks which are still retained on Compute Nodes, the Batch
        service deletes those Tasks' working directories and all their contents. When
        a Delete Job request is received, the Batch service sets the Job to the
        deleting state. All update operations on a Job that is in deleting state will
        fail with status code 409 (Conflict), with additional information indicating
        that the Job is being deleted.

        :param job_id: The ID of the Job to delete. Required.
        :type job_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword force: If true, the server will delete the Job even if the corresponding nodes have
         not fully processed the deletion. The default value is false. Default value is None.
        :paramtype force: bool
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Base mapping from HTTP status to the azure-core exception raised for it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # A 412 (Precondition Failed) means different things depending on which
        # etag precondition the caller asked for, so map it accordingly.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        # Caller-supplied overrides take precedence over the defaults above.
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional response-transformation callback supplied by the caller.
        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_delete_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            force=force,
            etag=etag,
            match_condition=match_condition,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # Substitute the account endpoint into the request URL template;
        # skip_quote because the endpoint is a full host, not a path segment.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Delete is accepted asynchronously by the service: 202 is the only success code.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def get_job(
        self,
        job_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        select: Optional[List[str]] = None,
        expand: Optional[List[str]] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> _azure_batch_models5.BatchJob:
        """Gets information about the specified Job.

        Gets information about the specified Job.

        :param job_id: The ID of the Job. Required.
        :type job_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :keyword expand: An OData $expand clause. Default value is None.
        :paramtype expand: list[str]
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: BatchJob. The BatchJob is compatible with MutableMapping
        :rtype: ~client.models.BatchJob
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Base mapping from HTTP status to the azure-core exception raised for it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # 412 (Precondition Failed) maps to a different exception per etag condition.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional response-transformation callback supplied by the caller.
        cls: ClsType[_azure_batch_models5.BatchJob] = kwargs.pop("cls", None)

        _request = build_batch_get_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            select=select,
            expand=expand,
            etag=etag,
            match_condition=match_condition,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # Substitute the account endpoint into the request URL template.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Unlike the mutation operations, GET honors a caller-requested streaming mode.
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    # Drain the body so the connection is released before raising.
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        # Streaming callers get the raw byte iterator; otherwise deserialize the model.
        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_azure_batch_models5.BatchJob, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def update_job(
        self,
        job_id: str,
        job: _azure_batch_models5.BatchJobUpdateContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Updates the properties of the specified Job.

        This replaces only the Job properties specified in the request. For example, if
        the Job has constraints, and a request does not specify the constraints
        element, then the Job keeps the existing constraints.

        :param job_id: The ID of the Job whose properties you want to update. Required.
        :type job_id: str
        :param job: The options to use for updating the Job. Required.
        :type job: ~client.models.BatchJobUpdateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Base mapping from HTTP status to the azure-core exception raised for it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # 412 (Precondition Failed) maps to a different exception per etag condition.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive headers so the content-type override below matches any casing.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # Batch requires the OData minimalmetadata content type unless overridden.
        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the model, skipping read-only fields the service would reject.
        _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_update_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Substitute the account endpoint into the request URL template.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def replace_job(
        self,
        job_id: str,
        job: _azure_batch_models5.BatchJob,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Updates the properties of the specified Job.

        This fully replaces all the updatable properties of the Job. For example, if
        the Job has constraints associated with it and if constraints is not specified
        with this request, then the Batch service will remove the existing constraints.

        :param job_id: The ID of the Job whose properties you want to update. Required.
        :type job_id: str
        :param job: A job with updated properties. Required.
        :type job: ~client.models.BatchJob
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Base mapping from HTTP status to the azure-core exception raised for it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # 412 (Precondition Failed) maps to a different exception per etag condition.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive headers so the content-type override below matches any casing.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # Batch requires the OData minimalmetadata content type unless overridden.
        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the model, skipping read-only fields the service would reject.
        _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_replace_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Substitute the account endpoint into the request URL template.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def disable_job(
        self,
        job_id: str,
        content: _azure_batch_models5.BatchJobDisableContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Disables the specified Job, preventing new Tasks from running.

        The Batch Service immediately moves the Job to the disabling state. Batch then
        uses the disableTasks parameter to determine what to do with the currently
        running Tasks of the Job. The Job remains in the disabling state until the
        disable operation is completed and all Tasks have been dealt with according to
        the disableTasks option; the Job then moves to the disabled state. No new Tasks
        are started under the Job until it moves back to active state. If you try to
        disable a Job that is in any state other than active, disabling, or disabled,
        the request fails with status code 409.

        :param job_id: The ID of the Job to disable. Required.
        :type job_id: str
        :param content: The options to use for disabling the Job. Required.
        :type content: ~client.models.BatchJobDisableContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Base mapping from HTTP status to the azure-core exception raised for it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # 412 (Precondition Failed) maps to a different exception per etag condition.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive headers so the content-type override below matches any casing.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # Batch requires the OData minimalmetadata content type unless overridden.
        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the model, skipping read-only fields the service would reject.
        _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_disable_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Substitute the account endpoint into the request URL template.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Disable is accepted asynchronously by the service: 202 is the only success code.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def enable_job(
        self,
        job_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Enables the specified Job, allowing new Tasks to run.

        When you call this API, the Batch service sets a disabled Job to the enabling
        state. After this operation is completed, the Job moves to the active
        state, and scheduling of new Tasks under the Job resumes. The Batch service
        does not allow a Task to remain in the active state for more than 180 days.
        Therefore, if you enable a Job containing active Tasks which were added more
        than 180 days ago, those Tasks will not run.

        :param job_id: The ID of the Job to enable. Required.
        :type job_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Base mapping from HTTP status to the azure-core exception raised for it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # 412 (Precondition Failed) maps to a different exception per etag condition.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional response-transformation callback supplied by the caller.
        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_enable_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # Substitute the account endpoint into the request URL template.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Enable is accepted asynchronously by the service: 202 is the only success code.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def terminate_job(
        self,
        job_id: str,
        parameters: Optional[_azure_batch_models5.BatchJobTerminateContent] = None,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        force: Optional[bool] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Terminates the specified Job, marking it as completed.

        When a Terminate Job request is received, the Batch service sets the Job to the
        terminating state. The Batch service then terminates any running Tasks
        associated with the Job and runs any required Job release Tasks. Then the Job
        moves into the completed state. If there are any Tasks in the Job in the active
        state, they will remain in the active state. Once a Job is terminated, new
        Tasks cannot be added and any remaining active Tasks will not be scheduled.

        :param job_id: The ID of the Job to terminate. Required.
        :type job_id: str
        :param parameters: The options to use for terminating the Job. Default value is None.
        :type parameters: ~client.models.BatchJobTerminateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword force: If true, the server will terminate the Job even if the corresponding nodes
         have not fully processed the termination. The default value is false. Default value is None.
        :paramtype force: bool
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Base mapping from HTTP status to the azure-core exception raised for it.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # 412 (Precondition Failed) maps to a different exception per etag condition.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive headers so the content-type override below matches any casing.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # Batch requires the OData minimalmetadata content type unless overridden.
        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # The request body is optional for terminate; send no body when omitted.
        if parameters is not None:
            _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
        else:
            _content = None

        _request = build_batch_terminate_job_request(
            job_id=job_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            force=force,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # Substitute the account endpoint into the request URL template.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Terminate is accepted asynchronously by the service: 202 is the only success code.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def create_job( + self, + job: _azure_batch_models5.BatchJobCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Job to the specified Account. + + The Batch service supports two ways to control the work done as part of a Job. + In the first approach, the user specifies a Job Manager Task. The Batch service + launches this Task when it is ready to start the Job. The Job Manager Task + controls all other Tasks that run under this Job, by using the Task APIs. In + the second approach, the user directly controls the execution of Tasks under an + active Job, by using the Task APIs. Also note: when naming Jobs, avoid + including sensitive information such as user names or secret project names. + This information may appear in telemetry logs accessible to Microsoft Support + engineers. + + :param job: The Job to be created. Required. + :type job: ~client.models.BatchJobCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. 
If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_job_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + 
response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_jobs( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchJob"]: + """Lists all of the Jobs in the specified Account. + + Lists all of the Jobs in the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs + `_. Default + value is None. 
+ :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchJob + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchJob]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_jobs_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchJob], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_jobs_from_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchJob"]: + """Lists the Jobs that have been created under the specified Job Schedule. + + Lists the Jobs that have been created under the specified Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule from which you want to get a list of Jobs. + Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-jobs-in-a-job-schedule + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchJob + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchJob]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_jobs_from_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchJob], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_job_preparation_and_release_task_status( # pylint: disable=name-too-long + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: 
Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchJobPreparationAndReleaseTaskStatus"]: + """Lists the execution status of the Job Preparation and Job Release Task for the + specified Job across the Compute Nodes where the Job has run. + + This API returns the Job Preparation and Job Release Task status on all Compute + Nodes that have run the Job Preparation or Job Release Task. This includes + Compute Nodes which have since been removed from the Pool. If this API is + invoked on a Job which has no Job Preparation or Job Release Task, the Batch + service returns HTTP status code 409 (Conflict) with an error code of + JobPreparationTaskNotSpecified. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-preparation-and-release-status + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. 
+ :paramtype select: list[str] + :return: An iterator like instance of BatchJobPreparationAndReleaseTaskStatus + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchJobPreparationAndReleaseTaskStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchJobPreparationAndReleaseTaskStatus]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_job_preparation_and_release_task_status_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + 
async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_azure_batch_models5.BatchJobPreparationAndReleaseTaskStatus], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_job_task_counts( + self, job_id: str, *, timeout: Optional[int] = None, ocpdate: Optional[datetime.datetime] = None, **kwargs: Any + ) -> _azure_batch_models5.BatchTaskCountsResult: + """Gets the Task counts for the specified Job. + + Task counts provide a count of the Tasks by active, running or completed Task + state, and a count of Tasks which succeeded or failed. Tasks in the preparing + state are counted as running. Note that the numbers returned may not always be + up to date. If you need exact task counts, use a list query. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchTaskCountsResult. The BatchTaskCountsResult is compatible with MutableMapping + :rtype: ~client.models.BatchTaskCountsResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchTaskCountsResult] = kwargs.pop("cls", None) + + _request = build_batch_get_job_task_counts_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchTaskCountsResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def create_certificate( + self, + certificate: _azure_batch_models5.BatchCertificate, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Certificate to the specified Account. + + Creates a Certificate to the specified Account. + + :param certificate: The Certificate to be created. Required. + :type certificate: ~client.models.BatchCertificate + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(certificate, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_certificate_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + 
response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_certificates( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchCertificate"]: + """Lists all of the Certificates that have been added to the specified Account. + + Lists all of the Certificates that have been added to the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://docs.microsoft.com/en-us/rest/api/batchservice/odata-filters-in-batch#list-certificates + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. 
+ :paramtype select: list[str] + :return: An iterator like instance of BatchCertificate + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchCertificate] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchCertificate]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_certificates_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_azure_batch_models5.BatchCertificate], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def cancel_certificate_deletion( + self, + thumbprint_algorithm: str, + thumbprint: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Cancels a failed deletion of a Certificate from the specified Account. + + If you try to delete a Certificate that is being used by a Pool or Compute + Node, the status of the Certificate changes to deleteFailed. If you decide that + you want to continue using the Certificate, you can use this operation to set + the status of the Certificate back to active. If you intend to delete the + Certificate, you do not need to run this operation after the deletion failed. + You must make sure that the Certificate is not being used by any resources, and + then you can try again to delete the Certificate. + + :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must + be sha1. Required. + :type thumbprint_algorithm: str + :param thumbprint: The thumbprint of the Certificate being deleted. Required. 
+ :type thumbprint: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_cancel_certificate_deletion_request( + thumbprint_algorithm=thumbprint_algorithm, + thumbprint=thumbprint, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] 
    @distributed_trace_async
    async def delete_certificate(
        self,
        thumbprint_algorithm: str,
        thumbprint: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Deletes a Certificate from the specified Account.

        You cannot delete a Certificate if a resource (Pool or Compute Node) is using
        it. Before you can delete a Certificate, you must therefore make sure that the
        Certificate is not associated with any existing Pools, the Certificate is not
        installed on any Nodes (even if you remove a Certificate from a Pool, it is not
        removed from existing Compute Nodes in that Pool until they restart), and no
        running Tasks depend on the Certificate. If you try to delete a Certificate
        that is in use, the deletion fails. The Certificate status changes to
        deleteFailed. You can use Cancel Delete Certificate to set the status back to
        active if you decide that you want to continue using the Certificate.

        :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must
         be sha1. Required.
        :type thumbprint_algorithm: str
        :param thumbprint: The thumbprint of the Certificate to be deleted. Required.
        :type thumbprint: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_delete_certificate_request(
            thumbprint_algorithm=thumbprint_algorithm,
            thumbprint=thumbprint,
            timeout=timeout,
            ocpdate=ocpdate,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # The endpoint is a client-level path parameter; skip_quote keeps the
        # already-formed URL from being percent-encoded a second time.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # The delete operation is accepted asynchronously by the service: 202 only.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def get_certificate(
        self,
        thumbprint_algorithm: str,
        thumbprint: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        select: Optional[List[str]] = None,
        **kwargs: Any
    ) -> _azure_batch_models5.BatchCertificate:
        """Gets information about the specified Certificate.

        :param thumbprint_algorithm: The algorithm used to derive the thumbprint parameter. This must
         be sha1. Required.
        :type thumbprint_algorithm: str
        :param thumbprint: The thumbprint of the Certificate to get. Required.
        :type thumbprint: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :return: BatchCertificate. The BatchCertificate is compatible with MutableMapping
        :rtype: ~client.models.BatchCertificate
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_azure_batch_models5.BatchCertificate] = kwargs.pop("cls", None)

        _request = build_batch_get_certificate_request(
            thumbprint_algorithm=thumbprint_algorithm,
            thumbprint=thumbprint,
            timeout=timeout,
            ocpdate=ocpdate,
            select=select,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Callers may request the raw (undeserialized) body via stream=True.
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        # Streaming mode hands back the raw byte iterator; otherwise deserialize
        # the JSON payload into the generated model type.
        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_azure_batch_models5.BatchCertificate, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def job_schedule_exists(
        self,
        job_schedule_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> bool:
        """Checks the specified Job Schedule exists.

        Checks the specified Job Schedule exists.

        :param job_schedule_id: The ID of the Job Schedule which you want to check. Required.
        :type job_schedule_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the
         client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the
         client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: bool
        :rtype: bool
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # Translate the requested ETag match condition into the exception type
        # raised when the service answers 412 Precondition Failed.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_job_schedule_exists_request(
            job_schedule_id=job_schedule_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # For an existence check, 404 is a valid answer ("does not exist"),
        # not an error; anything else outside 200 is raised.
        if response.status_code not in [200, 404]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        # Resource metadata headers are only present when the schedule exists.
        if response.status_code == 200:
            response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
            response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
            response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
            response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
        # True exactly when the service answered with a 2xx (i.e. 200 here).
        return 200 <= response.status_code <= 299
    @distributed_trace_async
    async def delete_job_schedule(
        self,
        job_schedule_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        force: Optional[bool] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Deletes a Job Schedule from the specified Account.

        When you delete a Job Schedule, this also deletes all Jobs and Tasks under that
        schedule. When Tasks are deleted, all the files in their working directories on
        the Compute Nodes are also deleted (the retention period is ignored). The Job
        Schedule statistics are no longer accessible once the Job Schedule is deleted,
        though they are still counted towards Account lifetime statistics.

        :param job_schedule_id: The ID of the Job Schedule to delete. Required.
        :type job_schedule_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the
         client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the
         client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword force: If true, the server will delete the JobSchedule even if the corresponding nodes
         have not fully processed the deletion. The default value is false. Default value is None.
        :paramtype force: bool
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # Translate the requested ETag match condition into the exception type
        # raised when the service answers 412 Precondition Failed.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_delete_job_schedule_request(
            job_schedule_id=job_schedule_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            force=force,
            etag=etag,
            match_condition=match_condition,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # Deletion is accepted asynchronously by the service: 202 only.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def get_job_schedule(
        self,
        job_schedule_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        select: Optional[List[str]] = None,
        expand: Optional[List[str]] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> _azure_batch_models5.BatchJobSchedule:
        """Gets information about the specified Job Schedule.

        :param job_schedule_id: The ID of the Job Schedule to get. Required.
        :type job_schedule_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the
         client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the
         client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :keyword expand: An OData $expand clause. Default value is None.
        :paramtype expand: list[str]
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: BatchJobSchedule. The BatchJobSchedule is compatible with MutableMapping
        :rtype: ~client.models.BatchJobSchedule
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # Translate the requested ETag match condition into the exception type
        # raised when the service answers 412 Precondition Failed.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_azure_batch_models5.BatchJobSchedule] = kwargs.pop("cls", None)

        _request = build_batch_get_job_schedule_request(
            job_schedule_id=job_schedule_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            select=select,
            expand=expand,
            etag=etag,
            match_condition=match_condition,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # Callers may request the raw (undeserialized) body via stream=True.
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        # Streaming mode hands back the raw byte iterator; otherwise deserialize
        # the JSON payload into the generated model type.
        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_azure_batch_models5.BatchJobSchedule, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def update_job_schedule(
        self,
        job_schedule_id: str,
        job_schedule: _azure_batch_models5.BatchJobScheduleUpdateContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Updates the properties of the specified Job Schedule.

        This replaces only the Job Schedule properties specified in the request. For
        example, if the schedule property is not specified with this request, then the
        Batch service will keep the existing schedule. Changes to a Job Schedule only
        impact Jobs created by the schedule after the update has taken place; currently
        running Jobs are unaffected.

        :param job_schedule_id: The ID of the Job Schedule to update. Required.
        :type job_schedule_id: str
        :param job_schedule: The options to use for updating the Job Schedule. Required.
        :type job_schedule: ~client.models.BatchJobScheduleUpdateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the
         client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the
         client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # Translate the requested ETag match condition into the exception type
        # raised when the service answers 412 Precondition Failed.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive view so an explicit caller-supplied "Content-Type"
        # header wins over the generated default below.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the model to JSON, dropping read-only fields the service
        # would reject on write.
        _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_update_job_schedule_request(
            job_schedule_id=job_schedule_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def replace_job_schedule(
        self,
        job_schedule_id: str,
        job_schedule: _azure_batch_models5.BatchJobSchedule,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        etag: Optional[str] = None,
        match_condition: Optional[MatchConditions] = None,
        **kwargs: Any
    ) -> None:
        """Updates the properties of the specified Job Schedule.

        This fully replaces all the updatable properties of the Job Schedule. For
        example, if the schedule property is not specified with this request, then the
        Batch service will remove the existing schedule. Changes to a Job Schedule only
        impact Jobs created by the schedule after the update has taken place; currently
        running Jobs are unaffected.

        :param job_schedule_id: The ID of the Job Schedule to update. Required.
        :type job_schedule_id: str
        :param job_schedule: A Job Schedule with updated properties. Required.
        :type job_schedule: ~client.models.BatchJobSchedule
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the
         client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the
         client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is
         None.
        :paramtype etag: str
        :keyword match_condition: The match condition to use upon the etag. Default value is None.
        :paramtype match_condition: ~azure.core.MatchConditions
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the ``error_map`` kwarg.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        # Translate the requested ETag match condition into the exception type
        # raised when the service answers 412 Precondition Failed.
        if match_condition == MatchConditions.IfNotModified:
            error_map[412] = ResourceModifiedError
        elif match_condition == MatchConditions.IfPresent:
            error_map[412] = ResourceNotFoundError
        elif match_condition == MatchConditions.IfMissing:
            error_map[412] = ResourceExistsError
        error_map.update(kwargs.pop("error_map", {}) or {})

        # Case-insensitive view so an explicit caller-supplied "Content-Type"
        # header wins over the generated default below.
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the model to JSON, dropping read-only fields the service
        # would reject on write.
        _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_replace_job_schedule_request(
            job_schedule_id=job_schedule_id,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            etag=etag,
            match_condition=match_condition,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def disable_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Disables a Job Schedule. + + No new Jobs will be created until the Job Schedule is enabled again. + + :param job_schedule_id: The ID of the Job Schedule to disable. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_disable_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, 
+ ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def enable_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Enables a Job Schedule. + + Enables a Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to enable. Required. 
+ :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def terminate_job_schedule( + self, + job_schedule_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + force: Optional[bool] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates a Job Schedule. + + Terminates a Job Schedule. + + :param job_schedule_id: The ID of the Job Schedule to terminates. Required. + :type job_schedule_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. 
The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword force: If true, the server will terminate the JobSchedule even if the corresponding + nodes have not fully processed the termination. The default value is false. Default value is + None. + :paramtype force: bool + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_terminate_job_schedule_request( + job_schedule_id=job_schedule_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + force=force, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + 
+ _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def create_job_schedule( + self, + job_schedule: _azure_batch_models5.BatchJobScheduleCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Job Schedule to the specified Account. + + Creates a Job Schedule to the specified Account. + + :param job_schedule: The Job Schedule to be created. Required. + :type job_schedule: ~client.models.BatchJobScheduleCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(job_schedule, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_job_schedule_request( + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_job_schedules( + self, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchJobSchedule"]: + """Lists all of the Job Schedules in the specified Account. + + Lists all of the Job Schedules in the specified Account. + + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-job-schedules + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. 
+ :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchJobSchedule + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchJobSchedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchJobSchedule]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_job_schedules_request( + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + 
async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchJobSchedule], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def create_task( + self, + job_id: str, + task: _azure_batch_models5.BatchTaskCreateContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Creates a Task to the specified Job. + + The maximum lifetime of a Task from addition to completion is 180 days. If a + Task has not completed within 180 days of being added it will be terminated by + the Batch service and left in whatever state it was in at that time. + + :param job_id: The ID of the Job to which the Task is to be created. Required. + :type job_id: str + :param task: The Task to be created. Required. + :type task: ~client.models.BatchTaskCreateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(task, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_task_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = 
self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_tasks( + self, + job_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchTask"]: + """Lists all of the Tasks that are associated with the specified Job. + + For multi-instance Tasks, information such as affinityId, executionInfo and + nodeInfo refer to the primary Task. Use the list subtasks API to retrieve + information about subtasks. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. 
For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-tasks + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :return: An iterator like instance of BatchTask + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchTask] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchTask]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_tasks_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + expand=expand, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params 
+ ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchTask], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def create_task_collection( + self, + job_id: str, + task_collection: _azure_batch_models5.BatchTaskGroup, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchTaskAddCollectionResult: + """Adds a collection of Tasks to the specified Job. + + Note that each Task must have a unique ID. The Batch service may not return the + results for each Task in the same order the Tasks were submitted in this + request. If the server times out or the connection is closed during the + request, the request may have been partially or fully processed, or not at all. + In such cases, the user should re-issue the request. 
Note that it is up to the + user to correctly handle failures when re-issuing a request. For example, you + should use the same Task IDs during a retry so that if the prior operation + succeeded, the retry will not create extra Tasks unexpectedly. If the response + contains any Tasks which failed to add, a client can retry the request. In a + retry, it is most efficient to resubmit only Tasks that failed to add, and to + omit Tasks that were successfully added on the first attempt. The maximum + lifetime of a Task from addition to completion is 180 days. If a Task has not + completed within 180 days of being added it will be terminated by the Batch + service and left in whatever state it was in at that time. + + :param job_id: The ID of the Job to which the Task collection is to be added. Required. + :type job_id: str + :param task_collection: The Tasks to be added. Required. + :type task_collection: ~client.models.BatchTaskGroup + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchTaskAddCollectionResult. 
The BatchTaskAddCollectionResult is compatible with + MutableMapping + :rtype: ~client.models.BatchTaskAddCollectionResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models5.BatchTaskAddCollectionResult] = kwargs.pop("cls", None) + + _content = json.dumps(task_collection, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_create_task_collection_request( + job_id=job_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + 
response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchTaskAddCollectionResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Deletes a Task from the specified Job. + + When a Task is deleted, all of the files in its directory on the Compute Node + where it ran are also deleted (regardless of the retention time). For + multi-instance Tasks, the delete Task operation applies synchronously to the + primary task; subtasks and their files are then deleted asynchronously in the + background. + + :param job_id: The ID of the Job from which to delete the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to delete. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + 
response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + expand: Optional[List[str]] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchTask: + """Gets information about the specified Task. + + For multi-instance Tasks, information such as affinityId, executionInfo and + nodeInfo refer to the primary Task. Use the list subtasks API to retrieve + information about subtasks. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to get information about. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. 
+ :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :keyword expand: An OData $expand clause. Default value is None. + :paramtype expand: list[str] + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. + :paramtype match_condition: ~azure.core.MatchConditions + :return: BatchTask. The BatchTask is compatible with MutableMapping + :rtype: ~client.models.BatchTask + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchTask] = kwargs.pop("cls", None) + + _request = build_batch_get_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + select=select, + expand=expand, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + 
headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchTask, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def replace_task( + self, + job_id: str, + task_id: str, + task: _azure_batch_models5.BatchTask, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = 
None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Updates the properties of the specified Task. + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to update. Required. + :type task_id: str + :param task: The Task to update. Required. + :type task: ~client.models.BatchTask + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + _content = json.dumps(task, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_replace_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
_failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace + def list_sub_tasks( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchSubtask"]: + """Lists all of the subtasks that are associated with the specified multi-instance + Task. + + If the Task is not a multi-instance Task then this returns an empty collection. + + :param job_id: The ID of the Job. Required. + :type job_id: str + :param task_id: The ID of the Task. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. 
+ :paramtype select: list[str] + :return: An iterator like instance of BatchSubtask + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchSubtask] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchSubtask]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_sub_tasks_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = 
_deserialize(List[_azure_batch_models5.BatchSubtask], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def terminate_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Terminates the specified Task. + + When the Task has been terminated, it moves to the completed state. For + multi-instance Tasks, the terminate Task operation applies synchronously to the + primary task; subtasks are then terminated asynchronously in the background. + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to terminate. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_terminate_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def reactivate_task( + self, + job_id: str, + task_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + etag: Optional[str] = None, + match_condition: Optional[MatchConditions] = None, + **kwargs: Any + ) -> None: + """Reactivates a Task, allowing it to run again even if its retry count has been + exhausted. + + Reactivation makes a Task eligible to be retried again up to its maximum retry + count. The Task's state is changed to active. As the Task is no longer in the + completed state, any previous exit code or failure information is no longer + available after reactivation. Each time a Task is reactivated, its retry count + is reset to 0. Reactivation will fail for Tasks that are not completed or that + previously completed successfully (with an exit code of 0). Additionally, it + will fail if the Job has completed (or is terminating or deleting). + + :param job_id: The ID of the Job containing the Task. Required. + :type job_id: str + :param task_id: The ID of the Task to reactivate. Required. + :type task_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. 
Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword etag: check if resource is changed. Set None to skip checking etag. Default value is + None. + :paramtype etag: str + :keyword match_condition: The match condition to use upon the etag. Default value is None. 
+ :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + if match_condition == MatchConditions.IfNotModified: + error_map[412] = ResourceModifiedError + elif match_condition == MatchConditions.IfPresent: + error_map[412] = ResourceNotFoundError + elif match_condition == MatchConditions.IfMissing: + error_map[412] = ResourceExistsError + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_reactivate_task_request( + job_id=job_id, + task_id=task_id, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + etag=etag, + match_condition=match_condition, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = 
self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def delete_task_file( + self, + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> None: + """Deletes the specified Task file from the Compute Node where the Task ran. + + Deletes the specified Task file from the Compute Node where the Task ran. + + :param job_id: The ID of the Job that contains the Task. Required. + :type job_id: str + :param task_id: The ID of the Task whose file you want to retrieve. Required. + :type task_id: str + :param file_path: The path to the Task file that you want to get the content of. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword recursive: Whether to delete children of a directory. If the filePath parameter + represents + a directory instead of a file, you can set recursive to true to delete the + directory and all of the files and subdirectories in it. If recursive is false + then the directory must be empty or deletion will fail. 
Default value is None. + :paramtype recursive: bool + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_task_file_request( + job_id=job_id, + task_id=task_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_task_file( + self, + job_id: str, + task_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + 
    @distributed_trace_async
    async def get_task_file(
        self,
        job_id: str,
        task_id: str,
        file_path: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        ocp_range: Optional[str] = None,
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Returns the content of the specified Task file.

        :param job_id: The ID of the Job that contains the Task. Required.
        :type job_id: str
        :param task_id: The ID of the Task whose file you want to retrieve. Required.
        :type task_id: str
        :param file_path: The path to the Task file that you want to get the content of. Required.
        :type file_path: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :keyword ocp_range: The byte range to be retrieved. The default is to retrieve the entire file.
         The format is bytes=startRange-endRange. Default value is None.
        :paramtype ocp_range: str
        :return: AsyncIterator[bytes]
        :rtype: AsyncIterator[bytes]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map common error status codes to azure-core exception types; callers may
        # extend or override this mapping via the "error_map" keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _request = build_batch_get_task_file_request(
            job_id=job_id,
            task_id=task_id,
            file_path=file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            ocp_range=ocp_range,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # "endpoint" is a client-level URL parameter; skip_quote keeps it verbatim.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # File content is streamed by default; callers may force full buffering
        # by passing stream=False.
        _stream = kwargs.pop("stream", True)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        # Surface interesting response headers to an optional custom "cls" callback.
        response_headers = {}
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["ocp-batch-file-isdirectory"] = self._deserialize(
            "bool", response.headers.get("ocp-batch-file-isdirectory")
        )
        response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode"))
        response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url"))
        response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))
        response_headers["content-type"] = self._deserialize("str", response.headers.get("content-type"))

        # The raw byte iterator is handed to the caller (or the cls callback).
        deserialized = response.iter_bytes()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def get_task_file_properties(
        self,
        job_id: str,
        task_id: str,
        file_path: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        if_modified_since: Optional[datetime.datetime] = None,
        if_unmodified_since: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> bool:
        """Gets the properties of the specified Task file.

        :param job_id: The ID of the Job that contains the Task. Required.
        :type job_id: str
        :param task_id: The ID of the Task whose file you want to retrieve. Required.
        :type task_id: str
        :param file_path: The path to the Task file that you want to get the content of. Required.
        :type file_path: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword if_modified_since: A timestamp indicating the last modified time of the resource known
         to the client. The operation will be performed only if the resource on the service has
         been modified since the specified time. Default value is None.
        :paramtype if_modified_since: ~datetime.datetime
        :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource
         known to the client. The operation will be performed only if the resource on the service has
         not been modified since the specified time. Default value is None.
        :paramtype if_unmodified_since: ~datetime.datetime
        :return: bool
        :rtype: bool
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map common error status codes to azure-core exception types; callers may
        # extend or override this mapping via the "error_map" keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[None] = kwargs.pop("cls", None)

        _request = build_batch_get_task_file_properties_request(
            job_id=job_id,
            task_id=task_id,
            file_path=file_path,
            timeout=timeout,
            ocpdate=ocpdate,
            if_modified_since=if_modified_since,
            if_unmodified_since=if_unmodified_since,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # "endpoint" is a client-level URL parameter; skip_quote keeps it verbatim.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # HEAD-style call: the file properties arrive as response headers only.
        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        # Surface the file-property headers to an optional custom "cls" callback.
        response_headers = {}
        response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["ocp-batch-file-isdirectory"] = self._deserialize(
            "bool", response.headers.get("ocp-batch-file-isdirectory")
        )
        response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode"))
        response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url"))
        response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
        # Success is reported as a boolean rather than a deserialized body.
        return 200 <= response.status_code <= 299
    @distributed_trace
    def list_task_files(
        self,
        job_id: str,
        task_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        max_results: Optional[int] = None,
        filter: Optional[str] = None,
        recursive: Optional[bool] = None,
        **kwargs: Any
    ) -> AsyncIterable["_azure_batch_models5.BatchNodeFile"]:
        """Lists the files in a Task's directory on its Compute Node.

        Lists the files in a Task's directory on its Compute Node.

        :param job_id: The ID of the Job that contains the Task. Required.
        :type job_id: str
        :param task_id: The ID of the Task whose files you want to list. Required.
        :type task_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword max_results: The maximum number of items to return in the response. A maximum of 1000
         applications can be returned. Default value is None.
        :paramtype max_results: int
        :keyword filter: An OData $filter clause. For more information on constructing this filter, see
         `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-task-files
         `_.
         Default value is None.
        :paramtype filter: str
        :keyword recursive: Whether to list children of the Task directory. This parameter can be used
         in combination with the filter parameter to list specific type of files. Default value is None.
        :paramtype recursive: bool
        :return: An iterator like instance of BatchNodeFile
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchNodeFile]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[List[_azure_batch_models5.BatchNodeFile]] = kwargs.pop("cls", None)

        # Map common error status codes to azure-core exception types; callers may
        # extend or override this mapping via the "error_map" keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the request from the operation parameters.
            if not next_link:

                _request = build_batch_list_task_files_request(
                    job_id=job_id,
                    task_id=task_id,
                    timeout=timeout,
                    ocpdate=ocpdate,
                    max_results=max_results,
                    filter=filter,
                    recursive=recursive,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.endpoint", self._config.endpoint, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        async def extract_data(pipeline_response):
            # Pull one page of BatchNodeFile elements plus the continuation token.
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(List[_azure_batch_models5.BatchNodeFile], deserialized.get("value", []))
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, raising a typed error on non-200 responses.
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
                raise HttpResponseError(response=response, model=error)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)
    @distributed_trace_async
    async def create_node_user(
        self,
        pool_id: str,
        node_id: str,
        user: _azure_batch_models5.BatchNodeUserCreateContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Adds a user Account to the specified Compute Node.

        You can add a user Account to a Compute Node only when it is in the idle or
        running state.

        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
        :type pool_id: str
        :param node_id: The ID of the machine on which you want to create a user Account. Required.
        :type node_id: str
        :param user: The options to use for creating the user. Required.
        :type user: ~client.models.BatchNodeUserCreateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map common error status codes to azure-core exception types; callers may
        # extend or override this mapping via the "error_map" keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # A caller-supplied content-type header wins over the service default.
        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the request model, dropping read-only fields.
        _content = json.dumps(user, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_create_node_user_request(
            pool_id=pool_id,
            node_id=node_id,
            timeout=timeout,
            ocpdate=ocpdate,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # "endpoint" is a client-level URL parameter; skip_quote keeps it verbatim.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 201 Created is the only success status for this operation.
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        # Surface interesting response headers to an optional custom "cls" callback.
        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_node_user_request( + pool_id=pool_id, + node_id=node_id, + user_name=user_name, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def replace_node_user( + self, + pool_id: str, + node_id: str, + user_name: str, + content: _azure_batch_models5.BatchNodeUserUpdateContent, + *, + timeout: Optional[int] = None, + ocpdate: 
    @distributed_trace_async
    async def replace_node_user(
        self,
        pool_id: str,
        node_id: str,
        user_name: str,
        content: _azure_batch_models5.BatchNodeUserUpdateContent,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Updates the password and expiration time of a user Account on the specified Compute Node.

        This operation replaces of all the updatable properties of the Account. For
        example, if the expiryTime element is not specified, the current value is
        replaced with the default value, not left unmodified. You can update a user
        Account on a Compute Node only when it is in the idle or running state.

        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
        :type pool_id: str
        :param node_id: The ID of the machine on which you want to update a user Account. Required.
        :type node_id: str
        :param user_name: The name of the user Account to update. Required.
        :type user_name: str
        :param content: The options to use for updating the user. Required.
        :type content: ~client.models.BatchNodeUserUpdateContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map common error status codes to azure-core exception types; callers may
        # extend or override this mapping via the "error_map" keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # A caller-supplied content-type header wins over the service default.
        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Serialize the request model, dropping read-only fields.
        _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_batch_replace_node_user_request(
            pool_id=pool_id,
            node_id=node_id,
            user_name=user_name,
            timeout=timeout,
            ocpdate=ocpdate,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # "endpoint" is a client-level URL parameter; skip_quote keeps it verbatim.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        # Surface interesting response headers to an optional custom "cls" callback.
        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
    @distributed_trace_async
    async def get_node(
        self,
        pool_id: str,
        node_id: str,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        select: Optional[List[str]] = None,
        **kwargs: Any
    ) -> _azure_batch_models5.BatchNode:
        """Gets information about the specified Compute Node.

        Gets information about the specified Compute Node.

        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
        :type pool_id: str
        :param node_id: The ID of the Compute Node that you want to get information about. Required.
        :type node_id: str
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :keyword select: An OData $select clause. Default value is None.
        :paramtype select: list[str]
        :return: BatchNode. The BatchNode is compatible with MutableMapping
        :rtype: ~client.models.BatchNode
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map common error status codes to azure-core exception types; callers may
        # extend or override this mapping via the "error_map" keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[_azure_batch_models5.BatchNode] = kwargs.pop("cls", None)

        _request = build_batch_get_node_request(
            pool_id=pool_id,
            node_id=node_id,
            timeout=timeout,
            ocpdate=ocpdate,
            select=select,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # "endpoint" is a client-level URL parameter; skip_quote keeps it verbatim.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        # The body is buffered by default; callers may request streaming with
        # stream=True, in which case the raw byte iterator is returned instead.
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        # Surface interesting response headers to an optional custom "cls" callback.
        response_headers = {}
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if _stream:
            deserialized = response.iter_bytes()
        else:
            deserialized = _deserialize(_azure_batch_models5.BatchNode, response.json())

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def reboot_node(
        self,
        pool_id: str,
        node_id: str,
        parameters: Optional[_azure_batch_models5.BatchNodeRebootContent] = None,
        *,
        timeout: Optional[int] = None,
        ocpdate: Optional[datetime.datetime] = None,
        **kwargs: Any
    ) -> None:
        """Restarts the specified Compute Node.

        You can restart a Compute Node only if it is in an idle or running state.

        :param pool_id: The ID of the Pool that contains the Compute Node. Required.
        :type pool_id: str
        :param node_id: The ID of the Compute Node that you want to restart. Required.
        :type node_id: str
        :param parameters: The options to use for rebooting the Compute Node. Default value is None.
        :type parameters: ~client.models.BatchNodeRebootContent
        :keyword timeout: The maximum time that the server can spend processing the request, in
         seconds. The default is 30 seconds. If the value is larger than 30, the default will be used
         instead. Default value is None.
        :paramtype timeout: int
        :keyword ocpdate: The time the request was issued. Client libraries typically set this to the
         current system clock time; set it explicitly if you are calling the REST API
         directly. Default value is None.
        :paramtype ocpdate: ~datetime.datetime
        :return: None
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map common error status codes to azure-core exception types; callers may
        # extend or override this mapping via the "error_map" keyword argument.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        # A caller-supplied content-type header wins over the service default.
        content_type: str = kwargs.pop(
            "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata")
        )
        cls: ClsType[None] = kwargs.pop("cls", None)

        # The request body is optional for this operation.
        if parameters is not None:
            _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
        else:
            _content = None

        _request = build_batch_reboot_node_request(
            pool_id=pool_id,
            node_id=node_id,
            timeout=timeout,
            ocpdate=ocpdate,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        # "endpoint" is a client-level URL parameter; skip_quote keeps it verbatim.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _stream = False
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        # 202 Accepted: the reboot is asynchronous on the service side.
        if response.status_code not in [202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json())
            raise HttpResponseError(response=response, model=error)

        # Surface interesting response headers to an optional custom "cls" callback.
        response_headers = {}
        response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId"))
        response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag"))
        response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified"))
        response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id"))
        response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id"))

        if cls:
            return cls(pipeline_response, None, response_headers)  # type: ignore
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def start_node( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Starts the specified Compute Node. + + You can start a Compute Node only if it has been deallocated. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_start_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, 
response_headers) # type: ignore + + @distributed_trace_async + async def reimage_node( + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models5.BatchNodeReimageContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Reinstalls the operating system on the specified Compute Node. + + You can reinstall the operating system on a Compute Node only if it is in an + idle or running state. This API can be invoked only on Pools created with the + cloud service configuration property. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for reimaging the Compute Node. Default value is None. + :type parameters: ~client.models.BatchNodeReimageContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_reimage_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def deallocate_node( + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models5.BatchNodeDeallocateContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Deallocates the specified Compute Node. + + You can deallocate a Compute Node only if it is in an idle or running state. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to restart. Required. + :type node_id: str + :param parameters: The options to use for deallocating the Compute Node. Default value is None. + :type parameters: ~client.models.BatchNodeDeallocateContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_deallocate_node_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def disable_node_scheduling( + self, + pool_id: str, + node_id: str, + parameters: Optional[_azure_batch_models5.BatchNodeDisableSchedulingContent] = None, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Disables Task scheduling on the specified Compute Node. + + You can disable Task scheduling on a Compute Node only if its current + scheduling state is enabled. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node on which you want to disable Task scheduling. + Required. + :type node_id: str + :param parameters: The options to use for disabling scheduling on the Compute Node. Default + value is None. + :type parameters: ~client.models.BatchNodeDisableSchedulingContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[None] = kwargs.pop("cls", None) + + if parameters is not None: + _content = json.dumps(parameters, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + _request = build_batch_disable_node_scheduling_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + 
response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def enable_node_scheduling( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> None: + """Enables Task scheduling on the specified Compute Node. + + You can enable Task scheduling on a Compute Node only if its current scheduling + state is disabled. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node on which you want to enable Task scheduling. + Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_enable_node_scheduling_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["DataServiceId"] = self._deserialize("str", response.headers.get("DataServiceId")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, 
response_headers) # type: ignore + + @distributed_trace_async + async def get_node_remote_login_settings( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchNodeRemoteLoginSettings: + """Gets the settings required for remote login to a Compute Node. + + Before you can remotely login to a Compute Node using the remote login settings, + you must create a user Account on the Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node for which to obtain the remote login settings. + Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :return: BatchNodeRemoteLoginSettings. 
The BatchNodeRemoteLoginSettings is compatible with + MutableMapping + :rtype: ~client.models.BatchNodeRemoteLoginSettings + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchNodeRemoteLoginSettings] = kwargs.pop("cls", None) + + _request = build_batch_get_node_remote_login_settings_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", 
response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchNodeRemoteLoginSettings, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def upload_node_logs( + self, + pool_id: str, + node_id: str, + content: _azure_batch_models5.UploadBatchServiceLogsContent, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> _azure_batch_models5.UploadBatchServiceLogsResult: + """Upload Azure Batch service log files from the specified Compute Node to Azure + Blob Storage. + + This is for gathering Azure Batch service log files in an automated fashion + from Compute Nodes if you are experiencing an error and wish to escalate to + Azure support. The Azure Batch service log files should be shared with Azure + support to aid in debugging issues with the Batch service. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node for which you want to get the Remote Desktop + Protocol file. Required. + :type node_id: str + :param content: The Azure Batch service log files upload options. Required. + :type content: ~client.models.UploadBatchServiceLogsContent + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :return: UploadBatchServiceLogsResult. The UploadBatchServiceLogsResult is compatible with + MutableMapping + :rtype: ~client.models.UploadBatchServiceLogsResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop( + "content_type", _headers.pop("content-type", "application/json; odata=minimalmetadata") + ) + cls: ClsType[_azure_batch_models5.UploadBatchServiceLogsResult] = kwargs.pop("cls", None) + + _content = json.dumps(content, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_batch_upload_node_logs_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) 
+ raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.UploadBatchServiceLogsResult, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_nodes( + self, + pool_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchNode"]: + """Lists the Compute Nodes in the specified Pool. + + Lists the Compute Nodes in the specified Pool. + + :param pool_id: The ID of the Pool from which you want to list Compute Nodes. Required. + :type pool_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. 
+ :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-nodes-in-a-pool + `_. + Default value is None. + :paramtype filter: str + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchNode + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchNode] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchNode]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_nodes_request( + pool_id=pool_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = 
{ + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchNode], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_node_extension( + self, + pool_id: str, + node_id: str, + extension_name: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> _azure_batch_models5.BatchNodeVMExtension: + """Gets information about the specified Compute Node Extension. + + Gets information about the specified Compute Node Extension. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that contains the extensions. Required. + :type node_id: str + :param extension_name: The name of the Compute Node Extension that you want to get information + about. Required. 
+ :type extension_name: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: BatchNodeVMExtension. The BatchNodeVMExtension is compatible with MutableMapping + :rtype: ~client.models.BatchNodeVMExtension + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_azure_batch_models5.BatchNodeVMExtension] = kwargs.pop("cls", None) + + _request = build_batch_get_node_extension_request( + pool_id=pool_id, + node_id=node_id, + extension_name=extension_name, + timeout=timeout, + ocpdate=ocpdate, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if 
_stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_azure_batch_models5.BatchNodeVMExtension, response.json()) + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_node_extensions( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + select: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchNodeVMExtension"]: + """Lists the Compute Nodes Extensions in the specified Pool. + + Lists the Compute Nodes Extensions in the specified Pool. + + :param pool_id: The ID of the Pool that contains Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node that you want to list extensions. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. 
+ :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword select: An OData $select clause. Default value is None. + :paramtype select: list[str] + :return: An iterator like instance of BatchNodeVMExtension + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchNodeVMExtension] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchNodeVMExtension]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_node_extensions_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + select=select, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() 
+ } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchNodeVMExtension], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def delete_node_file( + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> None: + """Deletes the specified file from the Compute Node. + + Deletes the specified file from the Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. 
+ :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword recursive: Whether to delete children of a directory. If the filePath parameter + represents + a directory instead of a file, you can set recursive to true to delete the + directory and all of the files and subdirectories in it. If recursive is false + then the directory must be empty or deletion will fail. Default value is None. + :paramtype recursive: bool + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_delete_node_file_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: 
ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + + @distributed_trace_async + async def get_node_file( + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + ocp_range: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterator[bytes]: + """Returns the content of the specified Compute Node file. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. + :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. 
+ :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. + :paramtype if_unmodified_since: ~datetime.datetime + :keyword ocp_range: The byte range to be retrieved. The default is to retrieve the entire file. + The + format is bytes=startRange-endRange. Default value is None. + :paramtype ocp_range: str + :return: AsyncIterator[bytes] + :rtype: AsyncIterator[bytes] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + _request = build_batch_get_node_file_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + ocp_range=ocp_range, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", True) + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + response_headers["content-type"] = self._deserialize("str", response.headers.get("content-type")) + + deserialized = response.iter_bytes() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_node_file_properties( + self, + pool_id: str, + node_id: str, + file_path: str, + *, + timeout: 
Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + if_modified_since: Optional[datetime.datetime] = None, + if_unmodified_since: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> bool: + """Gets the properties of the specified Compute Node file. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node. Required. + :type node_id: str + :param file_path: The path to the file or directory. Required. + :type file_path: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword if_modified_since: A timestamp indicating the last modified time of the resource known + to the + client. The operation will be performed only if the resource on the service has + been modified since the specified time. Default value is None. + :paramtype if_modified_since: ~datetime.datetime + :keyword if_unmodified_since: A timestamp indicating the last modified time of the resource + known to the + client. The operation will be performed only if the resource on the service has + not been modified since the specified time. Default value is None. 
+ :paramtype if_unmodified_since: ~datetime.datetime + :return: bool + :rtype: bool + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_batch_get_node_file_properties_request( + pool_id=pool_id, + node_id=node_id, + file_path=file_path, + timeout=timeout, + ocpdate=ocpdate, + if_modified_since=if_modified_since, + if_unmodified_since=if_unmodified_since, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers["Content-Length"] = self._deserialize("int", response.headers.get("Content-Length")) + response_headers["ETag"] = self._deserialize("str", response.headers.get("ETag")) + response_headers["Last-Modified"] = self._deserialize("rfc-1123", response.headers.get("Last-Modified")) + response_headers["client-request-id"] = self._deserialize("str", response.headers.get("client-request-id")) + 
response_headers["ocp-batch-file-isdirectory"] = self._deserialize( + "bool", response.headers.get("ocp-batch-file-isdirectory") + ) + response_headers["ocp-batch-file-mode"] = self._deserialize("str", response.headers.get("ocp-batch-file-mode")) + response_headers["ocp-batch-file-url"] = self._deserialize("str", response.headers.get("ocp-batch-file-url")) + response_headers["ocp-creation-time"] = self._deserialize("rfc-1123", response.headers.get("ocp-creation-time")) + response_headers["request-id"] = self._deserialize("str", response.headers.get("request-id")) + + if cls: + return cls(pipeline_response, None, response_headers) # type: ignore + return 200 <= response.status_code <= 299 + + @distributed_trace + def list_node_files( + self, + pool_id: str, + node_id: str, + *, + timeout: Optional[int] = None, + ocpdate: Optional[datetime.datetime] = None, + max_results: Optional[int] = None, + filter: Optional[str] = None, + recursive: Optional[bool] = None, + **kwargs: Any + ) -> AsyncIterable["_azure_batch_models5.BatchNodeFile"]: + """Lists all of the files in Task directories on the specified Compute Node. + + Lists all of the files in Task directories on the specified Compute Node. + + :param pool_id: The ID of the Pool that contains the Compute Node. Required. + :type pool_id: str + :param node_id: The ID of the Compute Node whose files you want to list. Required. + :type node_id: str + :keyword timeout: The maximum time that the server can spend processing the request, in + seconds. The default is 30 seconds. If the value is larger than 30, the default will be used + instead.". Default value is None. + :paramtype timeout: int + :keyword ocpdate: The time the request was issued. Client libraries typically set this to the + current system clock time; set it explicitly if you are calling the REST API + directly. Default value is None. + :paramtype ocpdate: ~datetime.datetime + :keyword max_results: The maximum number of items to return in the response. 
A maximum of 1000 + applications can be returned. Default value is None. + :paramtype max_results: int + :keyword filter: An OData $filter clause. For more information on constructing this filter, see + `https://learn.microsoft.com/rest/api/batchservice/odata-filters-in-batch#list-compute-node-files + `_. + Default value is None. + :paramtype filter: str + :keyword recursive: Whether to list children of a directory. Default value is None. + :paramtype recursive: bool + :return: An iterator like instance of BatchNodeFile + :rtype: ~azure.core.async_paging.AsyncItemPaged[~client.models.BatchNodeFile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_azure_batch_models5.BatchNodeFile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_batch_list_node_files_request( + pool_id=pool_id, + node_id=node_id, + timeout=timeout, + ocpdate=ocpdate, + max_results=max_results, + filter=filter, + recursive=recursive, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = 
HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.endpoint", self._config.endpoint, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_azure_batch_models5.BatchNodeFile], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("odata.nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize(_azure_batch_models3.BatchError, response.json()) + raise HttpResponseError(response=response, model=error) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/batch/azure-batch/client/aio/_operations/_patch.py b/sdk/batch/azure-batch/client/aio/_operations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. 
+ +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/batch/azure-batch/client/aio/_patch.py b/sdk/batch/azure-batch/client/aio/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/batch/azure-batch/client/aio/_vendor.py b/sdk/batch/azure-batch/client/aio/_vendor.py new file mode 100644 index 000000000000..baee6ee7264c --- /dev/null +++ b/sdk/batch/azure-batch/client/aio/_vendor.py @@ -0,0 +1,57 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import Optional, TYPE_CHECKING + +from azure.core import MatchConditions + +from ._configuration import BatchClientConfiguration + +if TYPE_CHECKING: + from azure.core import AsyncPipelineClient + + from .._serialization import Deserializer, Serializer + + +class BatchClientMixinABC(ABC): + """DO NOT use this class. It is for internal typing use only.""" + + _client: "AsyncPipelineClient" + _config: BatchClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" + + +def quote_etag(etag: Optional[str]) -> Optional[str]: + if not etag or etag == "*": + return etag + if etag.startswith("W/"): + return etag + if etag.startswith('"') and etag.endswith('"'): + return etag + if etag.startswith("'") and etag.endswith("'"): + return etag + return '"' + etag + '"' + + +def prep_if_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfNotModified: + if_match = quote_etag(etag) if etag else None + return if_match + if match_condition == MatchConditions.IfPresent: + return "*" + return None + + +def prep_if_none_match(etag: Optional[str], match_condition: Optional[MatchConditions]) -> Optional[str]: + if match_condition == MatchConditions.IfModified: + if_none_match = quote_etag(etag) if etag else None + return if_none_match + if match_condition == MatchConditions.IfMissing: + return "*" + return None diff --git a/sdk/batch/azure-batch/client/py.typed b/sdk/batch/azure-batch/client/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/batch/azure-batch/client/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. 
\ No newline at end of file diff --git a/sdk/batch/azure-batch/setup.py b/sdk/batch/azure-batch/setup.py index c8218eb0bece..4973ebecd551 100644 --- a/sdk/batch/azure-batch/setup.py +++ b/sdk/batch/azure-batch/setup.py @@ -5,7 +5,7 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -# coding: utf-8 + import os import re @@ -29,7 +29,7 @@ setup( name=PACKAGE_NAME, version=version, - description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), long_description=open("README.md", "r").read(), long_description_content_type="text/markdown", license="MIT License", @@ -59,7 +59,7 @@ ), include_package_data=True, package_data={ - "azure.batch": ["py.typed"], + "client": ["py.typed"], }, install_requires=[ "isodate>=0.6.1", diff --git a/sdk/batch/azure-batch/tests/test_batch.py b/sdk/batch/azure-batch/tests/test_batch.py index 51e33cb2465b..fedd042965d0 100644 --- a/sdk/batch/azure-batch/tests/test_batch.py +++ b/sdk/batch/azure-batch/tests/test_batch.py @@ -1,4 +1,4 @@ -# pylint: disable=too-many-lines +# pylint: disable=too-many-lines,line-too-long,useless-suppression # coding: utf-8 # ------------------------------------------------------------------------- @@ -85,7 +85,7 @@ async def assertCreateTasksError(self, code, func, *args, **kwargs): pytest.fail("Inner BatchErrorException expected but not exist") except Exception as err: pytest.fail("Expected CreateTasksError, instead got: {!r}".format(err)) - + @CachedResourceGroupPreparer(location=AZURE_LOCATION) @AccountPreparer(location=AZURE_LOCATION, batch_environment=BATCH_ENVIRONMENT) @pytest.mark.parametrize("BatchClient", [SyncBatchClient, AsyncBatchClient], ids=["sync", "async"]) @@ -1184,4 +1184,4 @@ async def test_batch_jobs(self, 
client: BatchClient, **kwargs): # Test Delete Job response = await wrap_result(client.delete_job(job_auto_param.id)) - assert response is None + assert response is None \ No newline at end of file diff --git a/sdk/loadtesting/azure-developer-loadtesting/MANIFEST.in b/sdk/loadtesting/azure-developer-loadtesting/MANIFEST.in index 6ebb7e55a108..32f595395d47 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/MANIFEST.in +++ b/sdk/loadtesting/azure-developer-loadtesting/MANIFEST.in @@ -4,4 +4,4 @@ include azure/developer/loadtesting/py.typed recursive-include tests *.py recursive-include samples *.py *.md include azure/__init__.py -include azure/developer/__init__.py \ No newline at end of file +include azure/developer/__init__.py diff --git a/sdk/loadtesting/azure-developer-loadtesting/apiview-properties.json b/sdk/loadtesting/azure-developer-loadtesting/apiview-properties.json new file mode 100644 index 000000000000..7499e5821d9c --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/apiview-properties.json @@ -0,0 +1,101 @@ +{ + "CrossLanguagePackageId": "Microsoft.LoadTestService", + "CrossLanguageDefinitionId": { + "microsoft.loadtestservice.models.AppComponent": "Microsoft.LoadTestService.AppComponent", + "microsoft.loadtestservice.models.ArtifactsContainerInfo": "Microsoft.LoadTestService.ArtifactsContainerInfo", + "microsoft.loadtestservice.models.AutoStopCriteria": "Microsoft.LoadTestService.AutoStopCriteria", + "microsoft.loadtestservice.models.CertificateMetadata": "Microsoft.LoadTestService.CertificateMetadata", + "microsoft.loadtestservice.models.DimensionFilter": "Microsoft.LoadTestService.DimensionFilter", + "microsoft.loadtestservice.models.DimensionValue": "Microsoft.LoadTestService.DimensionValue", + "microsoft.loadtestservice.models.ErrorDetails": "Microsoft.LoadTestService.ErrorDetails", + "microsoft.loadtestservice.models.FunctionFlexConsumptionResourceConfiguration": 
"Microsoft.LoadTestService.FunctionFlexConsumptionResourceConfiguration", + "microsoft.loadtestservice.models.TargetResourceConfigurations": "Microsoft.LoadTestService.TargetResourceConfigurations", + "microsoft.loadtestservice.models.FunctionFlexConsumptionTargetResourceConfigurations": "Microsoft.LoadTestService.FunctionFlexConsumptionTargetResourceConfigurations", + "microsoft.loadtestservice.models.LoadTestConfiguration": "Microsoft.LoadTestService.LoadTestConfiguration", + "microsoft.loadtestservice.models.MetricAvailability": "Microsoft.LoadTestService.MetricAvailability", + "microsoft.loadtestservice.models.MetricDefinition": "Microsoft.LoadTestService.MetricDefinition", + "microsoft.loadtestservice.models.MetricDefinitionCollection": "Microsoft.LoadTestService.MetricDefinitionCollection", + "microsoft.loadtestservice.models.MetricNamespace": "Microsoft.LoadTestService.MetricNamespace", + "microsoft.loadtestservice.models.MetricNamespaceCollection": "Microsoft.LoadTestService.MetricNamespaceCollection", + "microsoft.loadtestservice.models.MetricRequestPayload": "Microsoft.LoadTestService.MetricRequestPayload", + "microsoft.loadtestservice.models.MetricValue": "Microsoft.LoadTestService.MetricValue", + "microsoft.loadtestservice.models.NameAndDescription": "Microsoft.LoadTestService.NameAndDescription", + "microsoft.loadtestservice.models.OptionalLoadTestConfiguration": "Microsoft.LoadTestService.OptionalLoadTestConfiguration", + "microsoft.loadtestservice.models.PassFailCriteria": "Microsoft.LoadTestService.PassFailCriteria", + "microsoft.loadtestservice.models.PassFailMetric": "Microsoft.LoadTestService.PassFailMetric", + "microsoft.loadtestservice.models.PassFailServerMetric": "Microsoft.LoadTestService.PassFailServerMetric", + "microsoft.loadtestservice.models.RegionalConfiguration": "Microsoft.LoadTestService.RegionalConfiguration", + "microsoft.loadtestservice.models.ResourceMetric": "Microsoft.LoadTestService.ResourceMetric", + 
"microsoft.loadtestservice.models.Secret": "Microsoft.LoadTestService.Secret", + "microsoft.loadtestservice.models.Test": "Microsoft.LoadTestService.Test", + "microsoft.loadtestservice.models.TestAppComponents": "Microsoft.LoadTestService.TestAppComponents", + "microsoft.loadtestservice.models.TestFileInfo": "Microsoft.LoadTestService.TestFileInfo", + "microsoft.loadtestservice.models.TestInputArtifacts": "Microsoft.LoadTestService.TestInputArtifacts", + "microsoft.loadtestservice.models.TestProfile": "Microsoft.LoadTestService.TestProfile", + "microsoft.loadtestservice.models.TestProfileRun": "Microsoft.LoadTestService.TestProfileRun", + "microsoft.loadtestservice.models.TestProfileRunRecommendation": "Microsoft.LoadTestService.TestProfileRunRecommendation", + "microsoft.loadtestservice.models.TestRun": "Microsoft.LoadTestService.TestRun", + "microsoft.loadtestservice.models.TestRunAppComponents": "Microsoft.LoadTestService.TestRunAppComponents", + "microsoft.loadtestservice.models.TestRunArtifacts": "Microsoft.LoadTestService.TestRunArtifacts", + "microsoft.loadtestservice.models.TestRunDetail": "Microsoft.LoadTestService.TestRunDetail", + "microsoft.loadtestservice.models.TestRunFileInfo": "Microsoft.LoadTestService.TestRunFileInfo", + "microsoft.loadtestservice.models.TestRunInputArtifacts": "Microsoft.LoadTestService.TestRunInputArtifacts", + "microsoft.loadtestservice.models.TestRunOutputArtifacts": "Microsoft.LoadTestService.TestRunOutputArtifacts", + "microsoft.loadtestservice.models.TestRunServerMetricsConfiguration": "Microsoft.LoadTestService.TestRunServerMetricsConfiguration", + "microsoft.loadtestservice.models.TestRunStatistics": "Microsoft.LoadTestService.TestRunStatistics", + "microsoft.loadtestservice.models.TestServerMetricsConfiguration": "Microsoft.LoadTestService.TestServerMetricsConfiguration", + "microsoft.loadtestservice.models.TimeSeriesElement": "Microsoft.LoadTestService.TimeSeriesElement", + "customizations.models.PFMetrics": 
"Microsoft.LoadTestService.PFMetrics", + "customizations.models.PassFailAggregationFunction": "Microsoft.LoadTestService.PassFailAggregationFunction", + "customizations.models.PassFailAction": "Microsoft.LoadTestService.PassFailAction", + "customizations.models.PassFailResult": "Microsoft.LoadTestService.PassFailResult", + "customizations.models.SecretType": "Microsoft.LoadTestService.SecretType", + "customizations.models.CertificateType": "Microsoft.LoadTestService.CertificateType", + "customizations.models.FileType": "Microsoft.LoadTestService.FileType", + "customizations.models.FileValidationStatus": "Microsoft.LoadTestService.FileValidationStatus", + "customizations.models.TestKind": "Microsoft.LoadTestService.TestKind", + "customizations.models.ManagedIdentityType": "Microsoft.LoadTestService.ManagedIdentityType", + "customizations.models.ResourceKind": "Microsoft.LoadTestService.ResourceKind", + "customizations.models.PassFailTestResult": "Microsoft.LoadTestService.PassFailTestResult", + "customizations.models.TestRunStatus": "Microsoft.LoadTestService.TestRunStatus", + "customizations.models.RequestDataLevel": "Microsoft.LoadTestService.RequestDataLevel", + "customizations.models.CreatedByType": "Microsoft.LoadTestService.CreatedByType", + "customizations.models.TimeGrain": "Microsoft.LoadTestService.TimeGrain", + "customizations.models.Aggregation": "Microsoft.LoadTestService.Aggregation", + "customizations.models.MetricUnit": "Microsoft.LoadTestService.MetricUnit", + "customizations.models.TestProfileRunStatus": "Microsoft.LoadTestService.TestProfileRunStatus", + "customizations.models.RecommendationCategory": "Microsoft.LoadTestService.RecommendationCategory", + "customizations.LoadTestAdministrationClient.create_or_update_test": "Customizations.AdministrationOperations.createOrUpdateTest", + "customizations.LoadTestAdministrationClient.create_or_update_app_components": "Customizations.AdministrationOperations.createOrUpdateAppComponents", + 
"customizations.LoadTestAdministrationClient.create_or_update_server_metrics_config": "Customizations.AdministrationOperations.createOrUpdateServerMetricsConfig", + "customizations.LoadTestAdministrationClient.get_app_components": "Customizations.AdministrationOperations.getAppComponents", + "customizations.LoadTestAdministrationClient.get_server_metrics_config": "Customizations.AdministrationOperations.getServerMetricsConfig", + "customizations.LoadTestAdministrationClient.get_test": "Customizations.AdministrationOperations.getTest", + "customizations.LoadTestAdministrationClient.get_test_file": "Customizations.AdministrationOperations.getTestFile", + "customizations.LoadTestAdministrationClient.list_test_files": "Customizations.AdministrationOperations.listTestFiles", + "customizations.LoadTestAdministrationClient.list_tests": "Customizations.AdministrationOperations.listTests", + "customizations.LoadTestAdministrationClient.delete_test_file": "Customizations.AdministrationOperations.deleteTestFile", + "customizations.LoadTestAdministrationClient.delete_test": "Customizations.AdministrationOperations.deleteTest", + "customizations.LoadTestAdministrationClient.create_or_update_test_profile": "Customizations.AdministrationOperations.createOrUpdateTestProfile", + "customizations.LoadTestAdministrationClient.delete_test_profile": "Customizations.AdministrationOperations.deleteTestProfile", + "customizations.LoadTestAdministrationClient.get_test_profile": "Customizations.AdministrationOperations.getTestProfile", + "customizations.LoadTestAdministrationClient.list_test_profiles": "Customizations.AdministrationOperations.listTestProfiles", + "customizations.LoadTestRunClient.create_or_update_app_components": "Customizations.TestRunOperations.createOrUpdateAppComponents", + "customizations.LoadTestRunClient.create_or_update_server_metrics_config": "Customizations.TestRunOperations.createOrUpdateServerMetricsConfig", + "customizations.LoadTestRunClient.delete_test_run": 
"Customizations.TestRunOperations.deleteTestRun", + "customizations.LoadTestRunClient.get_app_components": "Customizations.TestRunOperations.getAppComponents", + "customizations.LoadTestRunClient.get_server_metrics_config": "Customizations.TestRunOperations.getServerMetricsConfig", + "customizations.LoadTestRunClient.get_test_run": "Customizations.TestRunOperations.getTestRun", + "customizations.LoadTestRunClient.get_test_run_file": "Customizations.TestRunOperations.getTestRunFile", + "customizations.LoadTestRunClient.list_metric_dimension_values": "Customizations.TestRunOperations.listMetricDimensionValues", + "customizations.LoadTestRunClient.get_metric_definitions": "Customizations.TestRunOperations.listMetricDefinitions", + "customizations.LoadTestRunClient.get_metric_namespaces": "Customizations.TestRunOperations.listMetricNamespaces", + "customizations.LoadTestRunClient.list_metrics": "Customizations.TestRunOperations.listMetrics", + "customizations.LoadTestRunClient.list_test_runs": "Customizations.TestRunOperations.listTestRuns", + "customizations.LoadTestRunClient.stop_test_run": "Customizations.TestRunOperations.stop", + "customizations.LoadTestRunClient.delete_test_profile_run": "Customizations.TestRunOperations.deleteTestProfileRun", + "customizations.LoadTestRunClient.get_test_profile_run": "Customizations.TestRunOperations.getTestProfileRun", + "customizations.LoadTestRunClient.list_test_profile_runs": "Customizations.TestRunOperations.listTestProfileRuns", + "customizations.LoadTestRunClient.stop_test_profile_run": "Customizations.TestRunOperations.stopTestProfileRun" + } +} \ No newline at end of file diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/__init__.py new file mode 100644 index 000000000000..31d5b087f062 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/__init__.py @@ -0,0 +1,34 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import LoadTestAdministrationClient # type: ignore +from ._client import LoadTestRunClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "LoadTestAdministrationClient", + "LoadTestRunClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_client.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_client.py new file mode 100644 index 000000000000..3e5aa0fba177 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_client.py @@ -0,0 +1,174 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import PipelineClient +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse + +from ._configuration import LoadTestAdministrationClientConfiguration, LoadTestRunClientConfiguration +from ._operations import LoadTestAdministrationClientOperationsMixin, LoadTestRunClientOperationsMixin +from ._serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class LoadTestAdministrationClient(LoadTestAdministrationClientOperationsMixin): + """LoadTestAdministrationClient. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "https://{endpoint}" + self._config = LoadTestAdministrationClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) + + +class LoadTestRunClient(LoadTestRunClientOperationsMixin): + """LoadTestRunClient. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + _endpoint = "https://{endpoint}" + self._config = LoadTestRunClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: PipelineClient = PipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_configuration.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_configuration.py new file mode 100644 index 000000000000..adf093acd20f --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_configuration.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + + +class LoadTestAdministrationClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long + """Configuration for LoadTestAdministrationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. 
Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cnt-prod.loadtesting.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "developer-loadtesting/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + 
self.credential, *self.credential_scopes, **kwargs + ) + + +class LoadTestRunClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for LoadTestRunClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "TokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cnt-prod.loadtesting.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "developer-loadtesting/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or 
policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_model_base.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_model_base.py new file mode 100644 index 000000000000..065b17f67c46 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_model_base.py @@ -0,0 +1,1236 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = 
o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> 
datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. 
+ :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: 
typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): # pylint: disable=unsubscriptable-object + def __init__(self, data: typing.Dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> 
typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ + return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> typing.Tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. 
+ """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field( + attr_to_rest_field: typing.Dict[str, 
"_RestField"], rest_name: str +) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: typing.Set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: # pylint: disable=too-many-nested-blocks + if isinstance(args[0], ET.Element): + existed_attr_keys = [] + model_meta = getattr(self, "_xml", {}) + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and args[0].get(xml_name) is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + if 
prop_meta.get("itemsName"): + xml_name = prop_meta.get("itemsName") + xml_ns = prop_meta.get("itemNs") + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = args[0].findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, items) + continue + + # text element is primitive type + if prop_meta.get("text", False): + if args[0].text is not None: + dict_to_pass[rf._rest_name] = _deserialize(rf._type, args[0].text) + continue + + # wrapped element could be normal property or array, it should only have one element + item = args[0].find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + dict_to_pass[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in args[0]: + if e.tag not in existed_attr_keys: + dict_to_pass[e.tag] = _convert_element(e) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: typing.Dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: typing.Dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) # pylint: disable=no-value-for-parameter + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, _RestField) and v._is_discriminator and 
v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = prop_meta.get("ns", model_meta.get("ns", None)) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> typing.Dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: typing.Dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: typing.List[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: typing.List[typing.Any]) -> typing.List[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? + if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? 
+ try: + if any(a for a in annotation.__args__ if a == type(None)): # pyright: ignore + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a != type(None)), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a != type(None)] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? + if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often them most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + if annotation._name == "Dict": # pyright: ignore + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + if annotation._name in ["List", "Set", "Tuple", "Sequence"]: # pyright: ignore + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, 
deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value) + except ValueError: + # for unknown value, return raw value + return value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + 
deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + +def _failsafe_deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, value, module, rf, format) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + value: typing.Any, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, value) + except DeserializationError: + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + return getattr(self._type, "args", [None])[0] + + @property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, 
obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + item = obj.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + return _deserialize(self._type, _serialize(item, self._format), rf=self) + + def __set__(self, obj: Model, value) -> None: + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[typing.List[str]] = None, + xml: typing.Optional[typing.Dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, 
exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[typing.Dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, typing.List[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + wrapped_element = _create_xml_element( + model_meta.get("name", o.__class__.__name__), + model_meta.get("prefix"), + model_meta.get("ns"), + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # if no ns for prop, use model's + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + 
wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + xml_name = prop_meta.get("name", k) + if prop_meta.get("ns"): + ET.register_namespace(prop_meta.get("prefix"), prop_meta.get("ns")) # pyright: ignore + xml_name = "{" + prop_meta.get("ns") + "}" + xml_name # pyright: ignore + # attribute should be primitive type + wrapped_element.set(xml_name, _get_primitive_type_value(v)) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": parent_meta.get("ns") if parent_meta else None, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": parent_meta.get("itemsNs", parent_meta.get("ns")), + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[typing.Dict[str, typing.Any]], +) -> ET.Element: + wrapped_element = _create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, meta.get("ns") if meta else None + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element + + +def _get_primitive_type_value(v) -> str: + 
if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _create_xml_element(tag, prefix=None, ns=None): + if prefix and ns: + ET.register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: typing.Dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: typing.List[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/__init__.py new file mode 100644 index 000000000000..93b9c55d70d8 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/__init__.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import LoadTestAdministrationClientOperationsMixin # type: ignore +from ._operations import LoadTestRunClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "LoadTestAdministrationClientOperationsMixin", + "LoadTestRunClientOperationsMixin", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_operations.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_operations.py new file mode 100644 index 000000000000..617cc2cddf24 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_operations.py @@ -0,0 +1,4494 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import datetime +from io import IOBase +import json +import sys +from typing import Any, Callable, Dict, IO, Iterable, List, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict + +from ...microsoft.loadtestservice import models as _microsoft_loadtestservice_models4 +from .._model_base import SdkJSONEncoder, _deserialize +from .._serialization import Serializer +from .._validation import api_version_validation +from .._vendor import LoadTestAdministrationClientMixinABC, LoadTestRunClientMixinABC + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_load_test_administration_create_or_update_test_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", 
"application/json") + + # Construct URL + _url = "/tests/{testId}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_create_or_update_app_components_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/app-components" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_create_or_update_server_metrics_config_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/server-metrics-config" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_get_app_components_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/app-components" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_load_test_administration_get_server_metrics_config_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/server-metrics-config" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_get_test_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_get_test_file_request( # pylint: disable=name-too-long + test_id: str, file_name: str, 
**kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/files/{fileName}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + "fileName": _SERIALIZER.url("file_name", file_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_list_test_files_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/files" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_list_tests_request( # pylint: disable=name-too-long + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + 
last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if orderby is not None: + _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") + if search is not None: + _params["search"] = _SERIALIZER.query("search", search, "str") + if last_modified_start_time is not None: + _params["lastModifiedStartTime"] = _SERIALIZER.query( + "last_modified_start_time", last_modified_start_time, "iso-8601" + ) + if last_modified_end_time is not None: + _params["lastModifiedEndTime"] = _SERIALIZER.query("last_modified_end_time", last_modified_end_time, "iso-8601") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_begin_upload_test_file_request( # pylint: disable=name-too-long + test_id: str, + file_name: str, + *, + file_type: Optional[Union[str, _microsoft_loadtestservice_models4.FileType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: str = kwargs.pop("content_type") + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = 
_headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/files/{fileName}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + "fileName": _SERIALIZER.url("file_name", file_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if file_type is not None: + _params["fileType"] = _SERIALIZER.query("file_type", file_type, "str") + + # Construct headers + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_delete_test_file_request( # pylint: disable=name-too-long + test_id: str, file_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}/files/{fileName}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + "fileName": _SERIALIZER.url("file_name", file_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_delete_test_request( # pylint: disable=name-too-long + test_id: str, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/tests/{testId}" + path_format_arguments = { + "testId": _SERIALIZER.url("test_id", test_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_create_or_update_test_profile_request( # pylint: disable=name-too-long + test_profile_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profiles/{testProfileId}" + path_format_arguments = { + "testProfileId": _SERIALIZER.url("test_profile_id", test_profile_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_load_test_administration_delete_test_profile_request( # pylint: disable=name-too-long + test_profile_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profiles/{testProfileId}" + path_format_arguments = { + "testProfileId": _SERIALIZER.url("test_profile_id", test_profile_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_administration_get_test_profile_request( # pylint: disable=name-too-long + test_profile_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profiles/{testProfileId}" + path_format_arguments = { + "testProfileId": _SERIALIZER.url("test_profile_id", test_profile_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def 
build_load_test_administration_list_test_profiles_request( # pylint: disable=name-too-long + *, + maxpagesize: Optional[int] = None, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + test_profile_ids: Optional[List[str]] = None, + test_ids: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profiles" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + if last_modified_start_time is not None: + _params["lastModifiedStartTime"] = _SERIALIZER.query( + "last_modified_start_time", last_modified_start_time, "iso-8601" + ) + if last_modified_end_time is not None: + _params["lastModifiedEndTime"] = _SERIALIZER.query("last_modified_end_time", last_modified_end_time, "iso-8601") + if test_profile_ids is not None: + _params["testProfileIds"] = _SERIALIZER.query("test_profile_ids", test_profile_ids, "[str]", div=",") + if test_ids is not None: + _params["testIds"] = _SERIALIZER.query("test_ids", test_ids, "[str]", div=",") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_begin_test_run_request( # pylint: disable=name-too-long + test_run_id: str, *, old_test_run_id: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or 
{}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if old_test_run_id is not None: + _params["oldTestRunId"] = _SERIALIZER.query("old_test_run_id", old_test_run_id, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_create_or_update_app_components_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/app-components" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = 
_SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_create_or_update_server_metrics_config_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/server-metrics-config" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["content-type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_delete_test_run_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: 
str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_app_components_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/app-components" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_server_metrics_config_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/server-metrics-config" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct 
parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_test_run_request(test_run_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_test_run_file_request( # pylint: disable=name-too-long + test_run_id: str, file_name: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/files/{fileName}" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + "fileName": _SERIALIZER.url("file_name", file_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_list_metric_dimension_values_request( # pylint: disable=name-too-long + test_run_id: str, + name: str, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/metric-dimensions/{name}/values" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + _params["metricname"] = _SERIALIZER.query("metric_name", metric_name, "str") + if interval is not None: + _params["interval"] = _SERIALIZER.query("interval", interval, "str") + _params["metricNamespace"] = _SERIALIZER.query("metric_namespace", metric_namespace, "str") + _params["timespan"] = _SERIALIZER.query("time_interval", time_interval, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_metric_definitions_request( # pylint: disable=name-too-long + test_run_id: str, *, metric_namespace: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + 
_params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/metric-definitions" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + _params["metricNamespace"] = _SERIALIZER.query("metric_namespace", metric_namespace, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_metric_namespaces_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/metric-namespaces" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_list_metrics_request( + test_run_id: str, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: 
Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}/metrics" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if aggregation is not None: + _params["aggregation"] = _SERIALIZER.query("aggregation", aggregation, "str") + _params["metricname"] = _SERIALIZER.query("metric_name", metric_name, "str") + if interval is not None: + _params["interval"] = _SERIALIZER.query("interval", interval, "str") + _params["metricNamespace"] = _SERIALIZER.query("metric_namespace", metric_namespace, "str") + _params["timespan"] = _SERIALIZER.query("time_interval", time_interval, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_list_test_runs_request( # pylint: disable=name-too-long + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + test_id: Optional[str] = None, + execution_from: Optional[datetime.datetime] = None, + execution_to: Optional[datetime.datetime] = None, + status: Optional[str] = None, + maxpagesize: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if orderby is not None: + _params["orderby"] = _SERIALIZER.query("orderby", orderby, "str") + if search is not None: + _params["search"] = _SERIALIZER.query("search", search, "str") + if test_id is not None: + _params["testId"] = _SERIALIZER.query("test_id", test_id, "str") + if execution_from is not None: + _params["executionFrom"] = _SERIALIZER.query("execution_from", execution_from, "iso-8601") + if execution_to is not None: + _params["executionTo"] = _SERIALIZER.query("execution_to", execution_to, "iso-8601") + if status is not None: + _params["status"] = _SERIALIZER.query("status", status, "str") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_stop_test_run_request( # pylint: disable=name-too-long + test_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-runs/{testRunId}:stop" + path_format_arguments = { + "testRunId": _SERIALIZER.url("test_run_id", test_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct 
parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_begin_test_profile_run_request( # pylint: disable=name-too-long + test_profile_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs/{testProfileRunId}" + path_format_arguments = { + "testProfileRunId": _SERIALIZER.url("test_profile_run_id", test_profile_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_delete_test_profile_run_request( # pylint: disable=name-too-long + test_profile_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs/{testProfileRunId}" + 
path_format_arguments = { + "testProfileRunId": _SERIALIZER.url("test_profile_run_id", test_profile_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_get_test_profile_run_request( # pylint: disable=name-too-long + test_profile_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs/{testProfileRunId}" + path_format_arguments = { + "testProfileRunId": _SERIALIZER.url("test_profile_run_id", test_profile_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_list_test_profile_runs_request( # pylint: disable=name-too-long + *, + maxpagesize: Optional[int] = None, + min_start_date_time: Optional[datetime.datetime] = None, + max_start_date_time: Optional[datetime.datetime] = None, + min_end_date_time: Optional[datetime.datetime] = None, + max_end_date_time: Optional[datetime.datetime] = None, + created_date_start_time: Optional[datetime.datetime] = None, + created_date_end_time: Optional[datetime.datetime] = None, + test_profile_run_ids: 
Optional[List[str]] = None, + test_profile_ids: Optional[List[str]] = None, + statuses: Optional[List[str]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if maxpagesize is not None: + _params["maxpagesize"] = _SERIALIZER.query("maxpagesize", maxpagesize, "int") + if min_start_date_time is not None: + _params["minStartDateTime"] = _SERIALIZER.query("min_start_date_time", min_start_date_time, "iso-8601") + if max_start_date_time is not None: + _params["maxStartDateTime"] = _SERIALIZER.query("max_start_date_time", max_start_date_time, "iso-8601") + if min_end_date_time is not None: + _params["minEndDateTime"] = _SERIALIZER.query("min_end_date_time", min_end_date_time, "iso-8601") + if max_end_date_time is not None: + _params["maxEndDateTime"] = _SERIALIZER.query("max_end_date_time", max_end_date_time, "iso-8601") + if created_date_start_time is not None: + _params["createdDateStartTime"] = _SERIALIZER.query( + "created_date_start_time", created_date_start_time, "iso-8601" + ) + if created_date_end_time is not None: + _params["createdDateEndTime"] = _SERIALIZER.query("created_date_end_time", created_date_end_time, "iso-8601") + if test_profile_run_ids is not None: + _params["testProfileRunIds"] = _SERIALIZER.query("test_profile_run_ids", test_profile_run_ids, "[str]", div=",") + if test_profile_ids is not None: + _params["testProfileIds"] = _SERIALIZER.query("test_profile_ids", test_profile_ids, "[str]", div=",") + if statuses is not None: + _params["statuses"] = _SERIALIZER.query("statuses", statuses, "[str]", div=",") + + # 
Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_load_test_run_stop_test_profile_run_request( # pylint: disable=name-too-long + test_profile_run_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2024-12-01-preview")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/test-profile-runs/{testProfileRunId}:stop" + path_format_arguments = { + "testProfileRunId": _SERIALIZER.url("test_profile_run_id", test_profile_run_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class LoadTestAdministrationClientOperationsMixin( # pylint: disable=name-too-long + LoadTestAdministrationClientMixinABC +): + + @overload + def create_or_update_test( + self, + test_id: str, + body: _microsoft_loadtestservice_models4.Test, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. 
+ :type body: ~customizations.models.Test + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_test( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_test( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_test( + self, test_id: str, body: Union[_microsoft_loadtestservice_models4.Test, JSON, IO[bytes]], **kwargs: Any + ) -> _microsoft_loadtestservice_models4.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Is one of the following types: Test, JSON, IO[bytes] + Required. + :type body: ~customizations.models.Test or JSON or IO[bytes] + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.Test] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_test_request( + test_id=test_id, + content_type=content_type, + 
api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.Test, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update_app_components( + self, + test_id: str, + body: _microsoft_loadtestservice_models4.TestAppComponents, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: ~customizations.models.TestAppComponents + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. 
The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_app_components( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_app_components( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. 
The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_app_components( + self, + test_id: str, + body: Union[_microsoft_loadtestservice_models4.TestAppComponents, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Is one of the following types: TestAppComponents, JSON, + IO[bytes] Required. + :type body: ~customizations.models.TestAppComponents or JSON or IO[bytes] + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestAppComponents] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_app_components_request( + test_id=test_id, + content_type=content_type, + 
api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update_server_metrics_config( + self, + test_id: str, + body: _microsoft_loadtestservice_models4.TestServerMetricsConfiguration, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: ~customizations.models.TestServerMetricsConfiguration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". 
+ :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_server_metrics_config( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_server_metrics_config( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". 
+ :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_server_metrics_config( + self, + test_id: str, + body: Union[_microsoft_loadtestservice_models4.TestServerMetricsConfiguration, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Is one of the following types: + TestServerMetricsConfiguration, JSON, IO[bytes] Required. + :type body: ~customizations.models.TestServerMetricsConfiguration or JSON or IO[bytes] + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestServerMetricsConfiguration] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_server_metrics_config_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = 
response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models4.TestServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_app_components(self, test_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.TestAppComponents: + """Get associated app component (collection of azure resources) for the given test. + + Get associated app component (collection of azure resources) for the given test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestAppComponents] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_app_components_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if 
_stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_server_metrics_config( + self, test_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestServerMetricsConfiguration: + """List server metrics configuration for the given test. + + List server metrics configuration for the given test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestServerMetricsConfiguration] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_server_metrics_config_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models4.TestServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_test(self, test_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.Test: + """Get load test details by test Id. 
+ + Get load test details by test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.Test] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.Test, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_test_file( + self, 
test_id: str, file_name: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestFileInfo: + """Get all the files that are associated with a test. + + Get all the files that are associated with a test. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: TestFileInfo. The TestFileInfo is compatible with MutableMapping + :rtype: ~customizations.models.TestFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestFileInfo] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_file_request( + test_id=test_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + 
deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_test_files( + self, test_id: str, **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.TestFileInfo"]: + """Get all test files. + + Get all test files. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: An iterator like instance of TestFileInfo + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.TestFileInfo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_microsoft_loadtestservice_models4.TestFileInfo]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_test_files_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] 
= self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models4.TestFileInfo], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_tests( + self, + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.Test"]: + """Get all load tests by the fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + + Get all load tests by the fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + + :keyword orderby: Sort on the supported fields in (field asc/desc) format. 
eg: + lastModifiedDateTime asc. Supported fields - lastModifiedDateTime. Default value is None. + :paramtype orderby: str + :keyword search: Prefix based, case sensitive search on searchable fields - displayName, + createdBy. For example, to search for a test, with display name is Login Test, + the search parameter can be Login. Default value is None. + :paramtype search: str + :keyword last_modified_start_time: Start DateTime(RFC 3339 literal format) of the last updated + time range to filter tests. Default value is None. + :paramtype last_modified_start_time: ~datetime.datetime + :keyword last_modified_end_time: End DateTime(RFC 3339 literal format) of the last updated time + range to filter tests. Default value is None. + :paramtype last_modified_end_time: ~datetime.datetime + :return: An iterator like instance of Test + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.Test] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models4.Test]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_tests_request( + orderby=orderby, + search=search, + last_modified_start_time=last_modified_start_time, + last_modified_end_time=last_modified_end_time, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make 
call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_microsoft_loadtestservice_models4.Test], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def _begin_upload_test_file( + self, + test_id: str, + file_name: str, + body: bytes, + *, + file_type: Optional[Union[str, _microsoft_loadtestservice_models4.FileType]] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestFileInfo: + """Upload input file for a given test Id. File size can't be more than 50 MB. + Existing file with same name for the given test will be overwritten. 
File + should be provided in the request body as application/octet-stream. + + Upload input file for a given test Id. File size can't be more than 50 MB. + Existing file with same name for the given test will be overwritten. File + should be provided in the request body as application/octet-stream. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Unique name for test file with file extension like : App.jmx. Required. + :type file_name: str + :param body: The file content as application/octet-stream. Required. + :type body: bytes + :keyword file_type: File type. Known values are: "JMX_FILE", "USER_PROPERTIES", + "ADDITIONAL_ARTIFACTS", "ZIPPED_ARTIFACTS", "URL_TEST_CONFIG", and "TEST_SCRIPT". Default value + is None. + :paramtype file_type: str or ~customizations.models.FileType + :return: TestFileInfo. The TestFileInfo is compatible with MutableMapping + :rtype: ~customizations.models.TestFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("content-type", "application/octet-stream")) + cls: ClsType[_microsoft_loadtestservice_models4.TestFileInfo] = kwargs.pop("cls", None) + + _content = body + + _request = build_load_test_administration_begin_upload_test_file_request( + test_id=test_id, + file_name=file_name, + file_type=file_type, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": 
self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_test_file( # pylint: disable=inconsistent-return-statements + self, test_id: str, file_name: str, **kwargs: Any + ) -> None: + """Delete file by the file name for a test. + + Delete file by the file name for a test. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Name of the file. Required. 
+ :type file_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_file_request( + test_id=test_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def delete_test(self, test_id: str, **kwargs: Any) -> None: # pylint: disable=inconsistent-return-statements + """Delete a test by its test Id. + + Delete a test by its test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. 
+ :type test_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + def create_or_update_test_profile( + self, + test_profile_id: str, + body: _microsoft_loadtestservice_models4.TestProfile, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. 
+ :type body: ~customizations.models.TestProfile + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_test_profile( + self, test_profile_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_test_profile( + self, + test_profile_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. 
+ :type test_profile_id: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "content_type", "accept"]}, + ) + def create_or_update_test_profile( + self, + test_profile_id: str, + body: Union[_microsoft_loadtestservice_models4.TestProfile, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Is one of the following types: TestProfile, JSON, IO[bytes] + Required. + :type body: ~customizations.models.TestProfile or JSON or IO[bytes] + :return: TestProfile. 
The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestProfile] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_test_profile_request( + test_profile_id=test_profile_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = 
_deserialize(_microsoft_loadtestservice_models4.TestProfile, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "accept"]}, + ) + def delete_test_profile( # pylint: disable=inconsistent-return-statements + self, test_profile_id: str, **kwargs: Any + ) -> None: + """Delete a test profile. + + Delete a test profile by its test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_profile_request( + test_profile_id=test_profile_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) 
+ + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "accept"]}, + ) + def get_test_profile(self, test_profile_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.TestProfile: + """Get load test profile details. + + Get load test profile details by test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestProfile] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_profile_request( + test_profile_id=test_profile_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except 
(StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestProfile, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={ + "2024-05-01-preview": [ + "api_version", + "maxpagesize", + "last_modified_start_time", + "last_modified_end_time", + "test_profile_ids", + "test_ids", + "accept", + ] + }, + ) + def list_test_profiles( + self, + *, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + test_profile_ids: Optional[List[str]] = None, + test_ids: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.TestProfile"]: + """List test profiles. + + Get all test profiles for the given filters. + + :keyword last_modified_start_time: Start DateTime(RFC 3339 literal format) of the last updated + time range to filter test profiles. Default value is None. + :paramtype last_modified_start_time: ~datetime.datetime + :keyword last_modified_end_time: End DateTime(RFC 3339 literal format) of the last updated time + range to filter test profiles. Default value is None. + :paramtype last_modified_end_time: ~datetime.datetime + :keyword test_profile_ids: Comma separated list of IDs of the test profiles to filter. Default + value is None. + :paramtype test_profile_ids: list[str] + :keyword test_ids: Comma separated list IDs of the tests which should be associated with the + test profiles to fetch. Default value is None. 
+ :paramtype test_ids: list[str] + :return: An iterator like instance of TestProfile + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.TestProfile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models4.TestProfile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_test_profiles_request( + maxpagesize=maxpagesize, + last_modified_start_time=last_modified_start_time, + last_modified_end_time=last_modified_end_time, + test_profile_ids=test_profile_ids, + test_ids=test_ids, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def 
extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models4.TestProfile], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class LoadTestRunClientOperationsMixin(LoadTestRunClientMixinABC): + + @overload + def _begin_test_run( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models4.TestRun, + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRun: ... + @overload + def _begin_test_run( + self, + test_run_id: str, + body: JSON, + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRun: ... + @overload + def _begin_test_run( + self, + test_run_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRun: ... 
+ + @distributed_trace + def _begin_test_run( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models4.TestRun, JSON, IO[bytes]], + *, + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRun: + """Create and start a new test run with the given test run Id. + + Create and start a new test run with the given test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: The resource instance. Is one of the following types: TestRun, JSON, IO[bytes] + Required. + :type body: ~customizations.models.TestRun or JSON or IO[bytes] + :keyword old_test_run_id: Existing test run identifier that should be rerun, if this is + provided, the + test will run with the JMX file, configuration and app components from the + existing test run. You can override the configuration values for new test run + in the request body. Default value is None. + :paramtype old_test_run_id: str + :return: TestRun. 
The TestRun is compatible with MutableMapping + :rtype: ~customizations.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestRun] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_begin_test_run_request( + test_run_id=test_run_id, + old_test_run_id=old_test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = 
_deserialize(_microsoft_loadtestservice_models4.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update_app_components( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models4.TestRunAppComponents, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: ~customizations.models.TestRunAppComponents + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_app_components( + self, test_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". 
+ :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_app_components( + self, test_run_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_app_components( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models4.TestRunAppComponents, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Is one of the following types: TestRunAppComponents, JSON, + IO[bytes] Required. 
+ :type body: ~customizations.models.TestRunAppComponents or JSON or IO[bytes] + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestRunAppComponents] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_create_or_update_app_components_request( + test_run_id=test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise 
HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRunAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def create_or_update_server_metrics_config( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: ~customizations.models.TestRunServerMetricsConfiguration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_server_metrics_config( + self, test_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. 
+ :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update_server_metrics_config( + self, test_run_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update_server_metrics_config( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. 
+ + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Is one of the following types: + TestRunServerMetricsConfiguration, JSON, IO[bytes] Required. + :type body: ~customizations.models.TestRunServerMetricsConfiguration or JSON or IO[bytes] + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_create_or_update_server_metrics_config_request( + test_run_id=test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def delete_test_run( # pylint: disable=inconsistent-return-statements + self, test_run_id: str, **kwargs: Any + ) -> None: + """Delete an existing load test run. + + Delete an existing load test run by providing the testRunId. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. 
+ :type test_run_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_run_delete_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + def get_app_components( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunAppComponents: + """Get associated app component (collection of azure resources) for the given test + run. + + Get associated app component (collection of azure resources) for the given test + run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRunAppComponents. 
The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRunAppComponents] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_app_components_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRunAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_server_metrics_config( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration: + """Get associated server metrics 
configuration for the given test run. + + Get associated server metrics configuration for the given test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_server_metrics_config_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + 
_microsoft_loadtestservice_models4.TestRunServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_test_run(self, test_run_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.TestRun: + """Get test run details by test run Id. + + Get test run details by test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRun. The TestRun is compatible with MutableMapping + :rtype: ~customizations.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_test_run_file( + self, test_run_id: str, file_name: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestRunFileInfo: + """Get test run file by file name. + + Get test run file by file name. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: TestRunFileInfo. The TestRunFileInfo is compatible with MutableMapping + :rtype: ~customizations.models.TestRunFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRunFileInfo] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_run_file_request( + test_run_id=test_run_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # 
pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRunFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_metric_dimension_values( + self, + test_run_id: str, + name: str, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + **kwargs: Any + ) -> Iterable[str]: + """List the dimension values for the given metric dimension name. + + List the dimension values for the given metric dimension name. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param name: Dimension name. Required. + :type name: str + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. 
+ :paramtype interval: str or ~customizations.models.TimeGrain + :return: An iterator like instance of str + :rtype: ~azure.core.paging.ItemPaged[str] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[str]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_metric_dimension_values_request( + test_run_id=test_run_id, + name=name, + metric_name=metric_name, + metric_namespace=metric_namespace, + time_interval=time_interval, + interval=interval, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[str], 
deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def get_metric_definitions( + self, test_run_id: str, *, metric_namespace: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.MetricDefinitionCollection: + """List the metric definitions for a load test run. + + List the metric definitions for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :return: MetricDefinitionCollection. 
The MetricDefinitionCollection is compatible with + MutableMapping + :rtype: ~customizations.models.MetricDefinitionCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.MetricDefinitionCollection] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_metric_definitions_request( + test_run_id=test_run_id, + metric_namespace=metric_namespace, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.MetricDefinitionCollection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_metric_namespaces( + self, test_run_id: str, **kwargs: Any + ) -> 
_microsoft_loadtestservice_models4.MetricNamespaceCollection: + """List the metric namespaces for a load test run. + + List the metric namespaces for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: MetricNamespaceCollection. The MetricNamespaceCollection is compatible with + MutableMapping + :rtype: ~customizations.models.MetricNamespaceCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.MetricNamespaceCollection] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_metric_namespaces_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = 
_deserialize(_microsoft_loadtestservice_models4.MetricNamespaceCollection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[_microsoft_loadtestservice_models4.MetricRequestPayload] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: ~customizations.models.MetricRequestPayload + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~customizations.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[JSON] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: JSON + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~customizations.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[IO[bytes]] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: IO[bytes] + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~customizations.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def list_metrics( + self, + test_run_id: str, + body: Optional[Union[_microsoft_loadtestservice_models4.MetricRequestPayload, JSON, IO[bytes]]] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models4.TimeGrain]] = None, + **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Is one of the following types: MetricRequestPayload, + JSON, IO[bytes] Default value is None. + :type body: ~customizations.models.MetricRequestPayload or JSON or IO[bytes] + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. 
+ :paramtype interval: str or ~customizations.models.TimeGrain + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[List[_microsoft_loadtestservice_models4.TimeSeriesElement]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_metrics_request( + test_run_id=test_run_id, + metric_name=metric_name, + metric_namespace=metric_namespace, + time_interval=time_interval, + aggregation=aggregation, + interval=interval, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models4.TimeSeriesElement], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_test_runs( + self, + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + test_id: Optional[str] = None, + execution_from: Optional[datetime.datetime] = None, + execution_to: Optional[datetime.datetime] = None, + status: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.TestRun"]: + """Get all test runs for the given filters. + + Get all test runs for the given filters. + + :keyword orderby: Sort on the supported fields in (field asc/desc) format. eg: executedDateTime + asc. Supported fields - executedDateTime. Default value is None. 
+ :paramtype orderby: str + :keyword search: Prefix based, case sensitive search on searchable fields - description, + executedUser. For example, to search for a test run, with description 500 VUs, + the search parameter can be 500. Default value is None. + :paramtype search: str + :keyword test_id: Unique name of an existing load test. Default value is None. + :paramtype test_id: str + :keyword execution_from: Start DateTime(RFC 3339 literal format) of test-run execution time + filter range. Default value is None. + :paramtype execution_from: ~datetime.datetime + :keyword execution_to: End DateTime(RFC 3339 literal format) of test-run execution time filter + range. Default value is None. + :paramtype execution_to: ~datetime.datetime + :keyword status: Comma separated list of test run status. Default value is None. + :paramtype status: str + :return: An iterator like instance of TestRun + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.TestRun] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models4.TestRun]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_test_runs_request( + orderby=orderby, + search=search, + test_id=test_id, + execution_from=execution_from, + execution_to=execution_to, + status=status, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_microsoft_loadtestservice_models4.TestRun], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def stop_test_run(self, test_run_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models4.TestRun: + """Stop test run by test run Id. + + Stop test run by test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. 
Required. + :type test_run_id: str + :return: TestRun. The TestRun is compatible with MutableMapping + :rtype: ~customizations.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_stop_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: _microsoft_loadtestservice_models4.TestProfileRun, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) 
-> _microsoft_loadtestservice_models4.TestProfileRun: ... + @overload + def _begin_test_profile_run( + self, test_profile_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: ... + @overload + def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: ... + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "content_type", "accept"]}, + ) + def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: Union[_microsoft_loadtestservice_models4.TestProfileRun, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: + """Create and start a new test profile run. + + Create and start a new test profile run with the given test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :param body: The resource instance. Is one of the following types: TestProfileRun, JSON, + IO[bytes] Required. + :type body: ~customizations.models.TestProfileRun or JSON or IO[bytes] + :return: TestProfileRun. 
The TestProfileRun is compatible with MutableMapping + :rtype: ~customizations.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models4.TestProfileRun] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_begin_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = 
_deserialize(_microsoft_loadtestservice_models4.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + ) + def delete_test_profile_run( # pylint: disable=inconsistent-return-statements + self, test_profile_run_id: str, **kwargs: Any + ) -> None: + """Delete an existing load test profile run. + + Delete an existing load test profile run by providing the test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_run_delete_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + ) + def get_test_profile_run( + self, test_profile_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: + """Get test profile run details. + + Get test profile run details by test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: TestProfileRun. The TestProfileRun is compatible with MutableMapping + :rtype: ~customizations.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestProfileRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={ + "2024-05-01-preview": [ + "api_version", + "maxpagesize", + "min_start_date_time", + "max_start_date_time", + "min_end_date_time", + "max_end_date_time", + "created_date_start_time", + "created_date_end_time", + "test_profile_run_ids", + "test_profile_ids", + "statuses", + "accept", + ] + }, + ) + def list_test_profile_runs( + self, + *, + min_start_date_time: Optional[datetime.datetime] = None, + max_start_date_time: Optional[datetime.datetime] = None, + min_end_date_time: Optional[datetime.datetime] = None, + max_end_date_time: Optional[datetime.datetime] = None, + created_date_start_time: Optional[datetime.datetime] = None, + created_date_end_time: Optional[datetime.datetime] = None, + test_profile_run_ids: Optional[List[str]] = None, + test_profile_ids: Optional[List[str]] = None, + statuses: Optional[List[str]] = None, + **kwargs: Any + ) -> Iterable["_microsoft_loadtestservice_models4.TestProfileRun"]: + """List test profile runs. + + Get all test profile runs for the given filters. + + :keyword min_start_date_time: Minimum Start DateTime(RFC 3339 literal format) of the test + profile runs to filter on. Default value is None. 
+ :paramtype min_start_date_time: ~datetime.datetime + :keyword max_start_date_time: Maximum Start DateTime(RFC 3339 literal format) of the test + profile runs to filter on. Default value is None. + :paramtype max_start_date_time: ~datetime.datetime + :keyword min_end_date_time: Minimum End DateTime(RFC 3339 literal format) of the test profile + runs to filter on. Default value is None. + :paramtype min_end_date_time: ~datetime.datetime + :keyword max_end_date_time: Maximum End DateTime(RFC 3339 literal format) of the test profile + runs to filter on. Default value is None. + :paramtype max_end_date_time: ~datetime.datetime + :keyword created_date_start_time: Start DateTime(RFC 3339 literal format) of the created time + range to filter test profile runs. Default value is None. + :paramtype created_date_start_time: ~datetime.datetime + :keyword created_date_end_time: End DateTime(RFC 3339 literal format) of the created time range + to filter test profile runs. Default value is None. + :paramtype created_date_end_time: ~datetime.datetime + :keyword test_profile_run_ids: Comma separated list of IDs of the test profile runs to filter. + Default value is None. + :paramtype test_profile_run_ids: list[str] + :keyword test_profile_ids: Comma separated IDs of the test profiles which should be associated + with the test profile runs to fetch. Default value is None. + :paramtype test_profile_ids: list[str] + :keyword statuses: Comma separated list of Statuses of the test profile runs to filter. Default + value is None. 
+ :paramtype statuses: list[str] + :return: An iterator like instance of TestProfileRun + :rtype: ~azure.core.paging.ItemPaged[~customizations.models.TestProfileRun] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models4.TestProfileRun]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_test_profile_runs_request( + maxpagesize=maxpagesize, + min_start_date_time=min_start_date_time, + max_start_date_time=max_start_date_time, + min_end_date_time=min_end_date_time, + max_end_date_time=max_end_date_time, + created_date_start_time=created_date_start_time, + created_date_end_time=created_date_end_time, + test_profile_run_ids=test_profile_run_ids, + test_profile_ids=test_profile_ids, + statuses=statuses, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = 
{ + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models4.TestProfileRun], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + ) + def stop_test_profile_run( + self, test_profile_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models4.TestProfileRun: + """Stop test profile run. + + Stop test profile run for the given test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: TestProfileRun. 
The TestProfileRun is compatible with MutableMapping + :rtype: ~customizations.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models4.TestProfileRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_stop_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models4.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_patch.py new file mode 100644 index 
000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_serialization.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_serialization.py new file mode 100644 index 000000000000..eb86ea23c965 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_serialization.py @@ -0,0 +1,2032 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Dict, + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, + List, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... 
+ JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. + :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... 
+ # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? 
None: + self.additional_properties: Optional[Dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. 
+ + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. 
Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, 
target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." 
+ raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec # pylint: disable=eval-used + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. + :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. 
+ + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. + :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + 
el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. 
+ + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. 
+ + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." 
+ raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an 
attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. 
+ + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. 
+ :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
+ if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. 
+ :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. + """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. 
+ :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. 
+ + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec # pylint: disable=eval-used + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. 
+ :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :return: Deserialized decimal + :raises DeserializationError: if string format invalid. 
+ :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. 
+ """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_validation.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_validation.py new file mode 100644 index 000000000000..752b2822f9d3 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_validation.py @@ -0,0 +1,50 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import functools + + +def api_version_validation(**kwargs): + params_added_on = kwargs.pop("params_added_on", {}) + method_added_on = kwargs.pop("method_added_on", "") + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + # this assumes the client has an _api_version attribute + client = args[0] + client_api_version = client._config.api_version # pylint: disable=protected-access + except AttributeError: + return func(*args, **kwargs) + + if method_added_on > client_api_version: + raise ValueError( + f"'{func.__name__}' is not available in API version " + f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." + ) + + unsupported = { + parameter: api_version + for api_version, parameters in params_added_on.items() + for parameter in parameters + if parameter in kwargs and api_version > client_api_version + } + if unsupported: + raise ValueError( + "".join( + [ + f"'{param}' is not available in API version {client_api_version}. " + f"Use service API version {version} or newer.\n" + for param, version in unsupported.items() + ] + ) + ) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_vendor.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_vendor.py new file mode 100644 index 000000000000..5860e8a7726a --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_vendor.py @@ -0,0 +1,34 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import TYPE_CHECKING + +from ._configuration import LoadTestAdministrationClientConfiguration, LoadTestRunClientConfiguration + +if TYPE_CHECKING: + from azure.core import PipelineClient + + from ._serialization import Deserializer, Serializer + + +class LoadTestAdministrationClientMixinABC(ABC): + """DO NOT use this class. It is for internal typing use only.""" + + _client: "PipelineClient" + _config: LoadTestAdministrationClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" + + +class LoadTestRunClientMixinABC(ABC): + """DO NOT use this class. It is for internal typing use only.""" + + _client: "PipelineClient" + _config: LoadTestRunClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/_version.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/__init__.py new file mode 100644 index 000000000000..da6781129f9e --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/__init__.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import LoadTestAdministrationClient # type: ignore +from ._client import LoadTestRunClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "LoadTestAdministrationClient", + "LoadTestRunClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_client.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_client.py new file mode 100644 index 000000000000..20236dba0418 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_client.py @@ -0,0 +1,178 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING +from typing_extensions import Self + +from azure.core import AsyncPipelineClient +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest + +from .._serialization import Deserializer, Serializer +from ._configuration import LoadTestAdministrationClientConfiguration, LoadTestRunClientConfiguration +from ._operations import LoadTestAdministrationClientOperationsMixin, LoadTestRunClientOperationsMixin + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class LoadTestAdministrationClient(LoadTestAdministrationClientOperationsMixin): + """LoadTestAdministrationClient. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "https://{endpoint}" + self._config = LoadTestAdministrationClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) + + +class LoadTestRunClient(LoadTestRunClientOperationsMixin): + """LoadTestRunClient. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. 
+ :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + _endpoint = "https://{endpoint}" + self._config = LoadTestRunClientConfiguration(endpoint=endpoint, credential=credential, **kwargs) + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=_endpoint, policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_configuration.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_configuration.py new file mode 100644 index 000000000000..9a77a5e7a3e4 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_configuration.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, TYPE_CHECKING + +from azure.core.pipeline import policies + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class LoadTestAdministrationClientConfiguration: # pylint: disable=too-many-instance-attributes,name-too-long + """Configuration for LoadTestAdministrationClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Required. 
+ :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cnt-prod.loadtesting.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "developer-loadtesting/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if 
self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) + + +class LoadTestRunClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for LoadTestRunClient. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param endpoint: Required. + :type endpoint: str + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :keyword api_version: The API version to use for this operation. Default value is + "2024-12-01-preview". Note that overriding this default value may result in unsupported + behavior. + :paramtype api_version: str + """ + + def __init__(self, endpoint: str, credential: "AsyncTokenCredential", **kwargs: Any) -> None: + api_version: str = kwargs.pop("api_version", "2024-12-01-preview") + + if endpoint is None: + raise ValueError("Parameter 'endpoint' must not be None.") + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + + self.endpoint = endpoint + self.credential = credential + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://cnt-prod.loadtesting.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "developer-loadtesting/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + 
self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/__init__.py new file mode 100644 index 000000000000..93b9c55d70d8 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/__init__.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import LoadTestAdministrationClientOperationsMixin # type: ignore +from ._operations import LoadTestRunClientOperationsMixin # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "LoadTestAdministrationClientOperationsMixin", + "LoadTestRunClientOperationsMixin", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_operations.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_operations.py new file mode 100644 index 000000000000..6f595d551c4a --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_operations.py @@ -0,0 +1,3476 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import datetime +from io import IOBase +import json +import sys +from typing import Any, AsyncIterable, Callable, Dict, IO, List, Optional, TypeVar, Union, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict + +from ....microsoft.loadtestservice import models as _microsoft_loadtestservice_models5 +from ..._model_base import SdkJSONEncoder, _deserialize +from ..._operations._operations import ( + build_load_test_administration_begin_upload_test_file_request, + build_load_test_administration_create_or_update_app_components_request, + build_load_test_administration_create_or_update_server_metrics_config_request, + build_load_test_administration_create_or_update_test_profile_request, + build_load_test_administration_create_or_update_test_request, + build_load_test_administration_delete_test_file_request, + build_load_test_administration_delete_test_profile_request, + build_load_test_administration_delete_test_request, + build_load_test_administration_get_app_components_request, + build_load_test_administration_get_server_metrics_config_request, + build_load_test_administration_get_test_file_request, + build_load_test_administration_get_test_profile_request, + build_load_test_administration_get_test_request, + build_load_test_administration_list_test_files_request, + build_load_test_administration_list_test_profiles_request, + 
build_load_test_administration_list_tests_request, + build_load_test_run_begin_test_profile_run_request, + build_load_test_run_begin_test_run_request, + build_load_test_run_create_or_update_app_components_request, + build_load_test_run_create_or_update_server_metrics_config_request, + build_load_test_run_delete_test_profile_run_request, + build_load_test_run_delete_test_run_request, + build_load_test_run_get_app_components_request, + build_load_test_run_get_metric_definitions_request, + build_load_test_run_get_metric_namespaces_request, + build_load_test_run_get_server_metrics_config_request, + build_load_test_run_get_test_profile_run_request, + build_load_test_run_get_test_run_file_request, + build_load_test_run_get_test_run_request, + build_load_test_run_list_metric_dimension_values_request, + build_load_test_run_list_metrics_request, + build_load_test_run_list_test_profile_runs_request, + build_load_test_run_list_test_runs_request, + build_load_test_run_stop_test_profile_run_request, + build_load_test_run_stop_test_run_request, +) +from ..._validation import api_version_validation +from .._vendor import LoadTestAdministrationClientMixinABC, LoadTestRunClientMixinABC + +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class LoadTestAdministrationClientOperationsMixin( # pylint: disable=name-too-long + LoadTestAdministrationClientMixinABC +): + + @overload + async def create_or_update_test( + self, + test_id: str, + body: _microsoft_loadtestservice_models5.Test, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.Test: + """Create a new test or update an existing test by providing the test Id. 
+ + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: ~customizations.models.Test + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_test( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_test( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. 
+ + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_test( + self, test_id: str, body: Union[_microsoft_loadtestservice_models5.Test, JSON, IO[bytes]], **kwargs: Any + ) -> _microsoft_loadtestservice_models5.Test: + """Create a new test or update an existing test by providing the test Id. + + Create a new test or update an existing test by providing the test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: The resource instance. Is one of the following types: Test, JSON, IO[bytes] + Required. + :type body: ~customizations.models.Test or JSON or IO[bytes] + :return: Test. 
The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.Test] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_test_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = 
_deserialize(_microsoft_loadtestservice_models5.Test, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_app_components( + self, + test_id: str, + body: _microsoft_loadtestservice_models5.TestAppComponents, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: ~customizations.models.TestAppComponents + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_app_components( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. 
The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_app_components( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_app_components( + self, + test_id: str, + body: Union[_microsoft_loadtestservice_models5.TestAppComponents, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Add an app component to a test. + + Add an app component to a test by providing the resource Id, name and type. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: App Component model. Is one of the following types: TestAppComponents, JSON, + IO[bytes] Required. + :type body: ~customizations.models.TestAppComponents or JSON or IO[bytes] + :return: TestAppComponents. 
The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestAppComponents] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_app_components_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() 
+ else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_server_metrics_config( + self, + test_id: str, + body: _microsoft_loadtestservice_models5.TestServerMetricsConfiguration, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: ~customizations.models.TestServerMetricsConfiguration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_server_metrics_config( + self, test_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_server_metrics_config( + self, test_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_server_metrics_config( + self, + test_id: str, + body: Union[_microsoft_loadtestservice_models5.TestServerMetricsConfiguration, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """Configure server metrics for a test. + + Configure server metrics for a test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param body: Server metric configuration model. Is one of the following types: + TestServerMetricsConfiguration, JSON, IO[bytes] Required. 
+ :type body: ~customizations.models.TestServerMetricsConfiguration or JSON or IO[bytes] + :return: TestServerMetricsConfiguration. The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestServerMetricsConfiguration] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_server_metrics_config_request( + test_id=test_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models5.TestServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_app_components( + self, test_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestAppComponents: + """Get associated app component (collection of azure resources) for the given test. + + Get associated app component (collection of azure resources) for the given test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: TestAppComponents. The TestAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestAppComponents] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_app_components_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_server_metrics_config( + self, test_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestServerMetricsConfiguration: + """List server metrics configuration for the given test. + + List server metrics configuration for the given test. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: TestServerMetricsConfiguration. 
The TestServerMetricsConfiguration is compatible with + MutableMapping + :rtype: ~customizations.models.TestServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestServerMetricsConfiguration] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_server_metrics_config_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models5.TestServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_test(self, test_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models5.Test: + 
"""Get load test details by test Id. + + Get load test details by test Id. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: Test. The Test is compatible with MutableMapping + :rtype: ~customizations.models.Test + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.Test] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.Test, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # 
type: ignore + + @distributed_trace_async + async def get_test_file( + self, test_id: str, file_name: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestFileInfo: + """Get a test file by the file name. + + Get the file with the given file name that is associated with a test. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: TestFileInfo. The TestFileInfo is compatible with MutableMapping + :rtype: ~customizations.models.TestFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestFileInfo] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_file_request( + test_id=test_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_test_files( + self, test_id: str, **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.TestFileInfo"]: + """Get all test files. + + Get all test files. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: An iterator like instance of TestFileInfo + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.TestFileInfo] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_microsoft_loadtestservice_models5.TestFileInfo]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_test_files_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + 
key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models5.TestFileInfo], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_tests( + self, + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.Test"]: + """Get all load tests by the fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. 
+ + Get all load tests by the fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + + :keyword orderby: Sort on the supported fields in (field asc/desc) format. eg: + lastModifiedDateTime asc. Supported fields - lastModifiedDateTime. Default value is None. + :paramtype orderby: str + :keyword search: Prefix based, case sensitive search on searchable fields - displayName, + createdBy. For example, to search for a test, with display name is Login Test, + the search parameter can be Login. Default value is None. + :paramtype search: str + :keyword last_modified_start_time: Start DateTime(RFC 3339 literal format) of the last updated + time range to filter tests. Default value is None. + :paramtype last_modified_start_time: ~datetime.datetime + :keyword last_modified_end_time: End DateTime(RFC 3339 literal format) of the last updated time + range to filter tests. Default value is None. + :paramtype last_modified_end_time: ~datetime.datetime + :return: An iterator like instance of Test + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.Test] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models5.Test]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_tests_request( + orderby=orderby, + search=search, + last_modified_start_time=last_modified_start_time, + last_modified_end_time=last_modified_end_time, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + 
headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_microsoft_loadtestservice_models5.Test], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def _begin_upload_test_file( + self, + test_id: str, + file_name: str, + body: bytes, + *, + 
file_type: Optional[Union[str, _microsoft_loadtestservice_models5.FileType]] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestFileInfo: + """Upload input file for a given test Id. File size can't be more than 50 MB. + Existing file with same name for the given test will be overwritten. File + should be provided in the request body as application/octet-stream. + + Upload input file for a given test Id. File size can't be more than 50 MB. + Existing file with same name for the given test will be overwritten. File + should be provided in the request body as application/octet-stream. + + :param test_id: Unique name for the load test, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Unique name for test file with file extension like : App.jmx. Required. + :type file_name: str + :param body: The file content as application/octet-stream. Required. + :type body: bytes + :keyword file_type: File type. Known values are: "JMX_FILE", "USER_PROPERTIES", + "ADDITIONAL_ARTIFACTS", "ZIPPED_ARTIFACTS", "URL_TEST_CONFIG", and "TEST_SCRIPT". Default value + is None. + :paramtype file_type: str or ~customizations.models.FileType + :return: TestFileInfo. 
The TestFileInfo is compatible with MutableMapping + :rtype: ~customizations.models.TestFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: str = kwargs.pop("content_type", _headers.pop("content-type", "application/octet-stream")) + cls: ClsType[_microsoft_loadtestservice_models5.TestFileInfo] = kwargs.pop("cls", None) + + _content = body + + _request = build_load_test_administration_begin_upload_test_file_request( + test_id=test_id, + file_name=file_name, + file_type=file_type, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + 
return deserialized # type: ignore + + @distributed_trace_async + async def delete_test_file(self, test_id: str, file_name: str, **kwargs: Any) -> None: + """Delete file by the file name for a test. + + Delete file by the file name for a test. + + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_file_request( + test_id=test_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def delete_test(self, test_id: str, **kwargs: Any) -> None: + """Delete a test by its test Id. + + Delete a test by its test Id. 
+ + :param test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_request( + test_id=test_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @overload + async def create_or_update_test_profile( + self, + test_profile_id: str, + body: _microsoft_loadtestservice_models5.TestProfile, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. 
+ + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: ~customizations.models.TestProfile + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_test_profile( + self, test_profile_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_test_profile( + self, + test_profile_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Create a new test profile or update an existing test profile. 
+ + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "content_type", "accept"]}, + ) + async def create_or_update_test_profile( + self, + test_profile_id: str, + body: Union[_microsoft_loadtestservice_models5.TestProfile, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Create a new test profile or update an existing test profile. + + Create a new test profile or update an existing test profile by providing the test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :param body: The resource instance. Is one of the following types: TestProfile, JSON, IO[bytes] + Required. + :type body: ~customizations.models.TestProfile or JSON or IO[bytes] + :return: TestProfile. 
The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestProfile] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_administration_create_or_update_test_profile_request( + test_profile_id=test_profile_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + 
else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfile, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "accept"]}, + ) + async def delete_test_profile(self, test_profile_id: str, **kwargs: Any) -> None: + """Delete a test profile. + + Delete a test profile by its test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_administration_delete_test_profile_request( + test_profile_id=test_profile_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise 
HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_id", "accept"]}, + ) + async def get_test_profile( + self, test_profile_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfile: + """Get load test profile details. + + Get load test profile details by test profile Id. + + :param test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_id: str + :return: TestProfile. The TestProfile is compatible with MutableMapping + :rtype: ~customizations.models.TestProfile + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestProfile] = kwargs.pop("cls", None) + + _request = build_load_test_administration_get_test_profile_request( + test_profile_id=test_profile_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await 
response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfile, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={ + "2024-05-01-preview": [ + "api_version", + "maxpagesize", + "last_modified_start_time", + "last_modified_end_time", + "test_profile_ids", + "test_ids", + "accept", + ] + }, + ) + def list_test_profiles( + self, + *, + last_modified_start_time: Optional[datetime.datetime] = None, + last_modified_end_time: Optional[datetime.datetime] = None, + test_profile_ids: Optional[List[str]] = None, + test_ids: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.TestProfile"]: + """List test profiles. + + Get all test profiles for the given filters. + + :keyword last_modified_start_time: Start DateTime(RFC 3339 literal format) of the last updated + time range to filter test profiles. Default value is None. + :paramtype last_modified_start_time: ~datetime.datetime + :keyword last_modified_end_time: End DateTime(RFC 3339 literal format) of the last updated time + range to filter test profiles. Default value is None. + :paramtype last_modified_end_time: ~datetime.datetime + :keyword test_profile_ids: Comma separated list of IDs of the test profiles to filter. Default + value is None. + :paramtype test_profile_ids: list[str] + :keyword test_ids: Comma separated list IDs of the tests which should be associated with the + test profiles to fetch. Default value is None. 
+ :paramtype test_ids: list[str] + :return: An iterator like instance of TestProfile + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.TestProfile] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models5.TestProfile]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_administration_list_test_profiles_request( + maxpagesize=maxpagesize, + last_modified_start_time=last_modified_start_time, + last_modified_end_time=last_modified_end_time, + test_profile_ids=test_profile_ids, + test_ids=test_ids, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request 
+ + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models5.TestProfile], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class LoadTestRunClientOperationsMixin(LoadTestRunClientMixinABC): + + @overload + async def _begin_test_run( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models5.TestRun, + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRun: ... + @overload + async def _begin_test_run( + self, + test_run_id: str, + body: JSON, + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRun: ... + @overload + async def _begin_test_run( + self, + test_run_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRun: ... 
+ + @distributed_trace_async + async def _begin_test_run( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models5.TestRun, JSON, IO[bytes]], + *, + old_test_run_id: Optional[str] = None, + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRun: + """Create and start a new test run with the given test run Id. + + Create and start a new test run with the given test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: The resource instance. Is one of the following types: TestRun, JSON, IO[bytes] + Required. + :type body: ~customizations.models.TestRun or JSON or IO[bytes] + :keyword old_test_run_id: Existing test run identifier that should be rerun, if this is + provided, the + test will run with the JMX file, configuration and app components from the + existing test run. You can override the configuration values for new test run + in the request body. Default value is None. + :paramtype old_test_run_id: str + :return: TestRun. 
The TestRun is compatible with MutableMapping + :rtype: ~customizations.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestRun] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_begin_test_run_request( + test_run_id=test_run_id, + old_test_run_id=old_test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + 
deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_app_components( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models5.TestRunAppComponents, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: ~customizations.models.TestRunAppComponents + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_app_components( + self, test_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. 
+ Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_app_components( + self, test_run_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_app_components( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models5.TestRunAppComponents, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Add an app component to a test run. + + Add an app component to a test run by providing the resource Id, name and type. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: App Component model. Is one of the following types: TestRunAppComponents, JSON, + IO[bytes] Required. 
+ :type body: ~customizations.models.TestRunAppComponents or JSON or IO[bytes] + :return: TestRunAppComponents. The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestRunAppComponents] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_create_or_update_app_components_request( + test_run_id=test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, 
error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRunAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def create_or_update_server_metrics_config( + self, + test_run_id: str, + body: _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: ~customizations.models.TestRunServerMetricsConfiguration + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_server_metrics_config( + self, test_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. 
+ :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update_server_metrics_config( + self, test_run_id: str, body: IO[bytes], *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. + + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/merge-patch+json". + :paramtype content_type: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update_server_metrics_config( + self, + test_run_id: str, + body: Union[_microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + """Configure server metrics for a test run. + + Configure server metrics for a test run. 
+ + :param test_run_id: Unique Id for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Server metric configuration model. Is one of the following types: + TestRunServerMetricsConfiguration, JSON, IO[bytes] Required. + :type body: ~customizations.models.TestRunServerMetricsConfiguration or JSON or IO[bytes] + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("content-type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_create_or_update_server_metrics_config_request( + test_run_id=test_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: 
ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize( + _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def delete_test_run(self, test_run_id: str, **kwargs: Any) -> None: + """Delete an existing load test run. + + Delete an existing load test run by providing the testRunId. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. 
+ :type test_run_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_run_delete_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + async def get_app_components( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunAppComponents: + """Get associated app component (collection of azure resources) for the given test + run. + + Get associated app component (collection of azure resources) for the given test + run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRunAppComponents. 
The TestRunAppComponents is compatible with MutableMapping + :rtype: ~customizations.models.TestRunAppComponents + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRunAppComponents] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_app_components_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRunAppComponents, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_server_metrics_config( + self, test_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration: + 
"""Get associated server metrics configuration for the given test run. + + Get associated server metrics configuration for the given test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRunServerMetricsConfiguration. The TestRunServerMetricsConfiguration is compatible + with MutableMapping + :rtype: ~customizations.models.TestRunServerMetricsConfiguration + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_server_metrics_config_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + 
else: + deserialized = _deserialize( + _microsoft_loadtestservice_models5.TestRunServerMetricsConfiguration, response.json() + ) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_test_run(self, test_run_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models5.TestRun: + """Get test run details by test run Id. + + Get test run details by test run Id. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRun. The TestRun is compatible with MutableMapping + :rtype: ~customizations.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except 
(StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_test_run_file( + self, test_run_id: str, file_name: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestRunFileInfo: + """Get test run file by file name. + + Get test run file by file name. + + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param file_name: Name of the file. Required. + :type file_name: str + :return: TestRunFileInfo. The TestRunFileInfo is compatible with MutableMapping + :rtype: ~customizations.models.TestRunFileInfo + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRunFileInfo] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_run_file_request( + test_run_id=test_run_id, + file_name=file_name, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRunFileInfo, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def list_metric_dimension_values( + self, + test_run_id: str, + name: str, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + **kwargs: Any + ) -> AsyncIterable[str]: + """List the dimension values for the given metric dimension name. + + List the dimension values for the given metric dimension name. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param name: Dimension name. Required. + :type name: str + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". 
Default value is None. + :paramtype interval: str or ~customizations.models.TimeGrain + :return: An iterator like instance of str + :rtype: ~azure.core.async_paging.AsyncItemPaged[str] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[str]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_metric_dimension_values_request( + test_run_id=test_run_id, + name=name, + metric_name=metric_name, + metric_namespace=metric_namespace, + time_interval=time_interval, + interval=interval, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + 
list_of_elem = _deserialize(List[str], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def get_metric_definitions( + self, test_run_id: str, *, metric_namespace: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.MetricDefinitionCollection: + """List the metric definitions for a load test run. + + List the metric definitions for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :return: MetricDefinitionCollection. 
The MetricDefinitionCollection is compatible with + MutableMapping + :rtype: ~customizations.models.MetricDefinitionCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.MetricDefinitionCollection] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_metric_definitions_request( + test_run_id=test_run_id, + metric_namespace=metric_namespace, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.MetricDefinitionCollection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_metric_namespaces( + self, test_run_id: str, **kwargs: Any + ) -> 
_microsoft_loadtestservice_models5.MetricNamespaceCollection: + """List the metric namespaces for a load test run. + + List the metric namespaces for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: MetricNamespaceCollection. The MetricNamespaceCollection is compatible with + MutableMapping + :rtype: ~customizations.models.MetricNamespaceCollection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.MetricNamespaceCollection] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_metric_namespaces_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + 
deserialized = _deserialize(_microsoft_loadtestservice_models5.MetricNamespaceCollection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[_microsoft_loadtestservice_models5.MetricRequestPayload] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: ~customizations.models.MetricRequestPayload + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~customizations.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[JSON] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: JSON + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~customizations.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def list_metrics( + self, + test_run_id: str, + body: Optional[IO[bytes]] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Default value is None. + :type body: IO[bytes] + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. + :paramtype interval: str or ~customizations.models.TimeGrain + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def list_metrics( + self, + test_run_id: str, + body: Optional[Union[_microsoft_loadtestservice_models5.MetricRequestPayload, JSON, IO[bytes]]] = None, + *, + metric_name: str, + metric_namespace: str, + time_interval: str, + aggregation: Optional[str] = None, + interval: Optional[Union[str, _microsoft_loadtestservice_models5.TimeGrain]] = None, + **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.TimeSeriesElement"]: + """List the metric values for a load test run. + + List the metric values for a load test run. + + :param test_run_id: Unique name for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :param body: Metric dimension filter. Is one of the following types: MetricRequestPayload, + JSON, IO[bytes] Default value is None. + :type body: ~customizations.models.MetricRequestPayload or JSON or IO[bytes] + :keyword metric_name: Metric name. Required. + :paramtype metric_name: str + :keyword metric_namespace: Metric namespace to query metric definitions for. Required. + :paramtype metric_namespace: str + :keyword time_interval: The timespan of the query. It is a string with the following format + 'startDateTime_ISO/endDateTime_ISO'. Required. + :paramtype time_interval: str + :keyword aggregation: The aggregation. Default value is None. + :paramtype aggregation: str + :keyword interval: The interval (i.e. timegrain) of the query. Known values are: "PT5S", + "PT10S", "PT1M", "PT5M", and "PT1H". Default value is None. 
+ :paramtype interval: str or ~customizations.models.TimeGrain + :return: An iterator like instance of TimeSeriesElement + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.TimeSeriesElement] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[List[_microsoft_loadtestservice_models5.TimeSeriesElement]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + else: + _content = None + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_metrics_request( + test_run_id=test_run_id, + metric_name=metric_name, + metric_namespace=metric_namespace, + time_interval=time_interval, + aggregation=aggregation, + interval=interval, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + 
_next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models5.TimeSeriesElement], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace + def list_test_runs( + self, + *, + orderby: Optional[str] = None, + search: Optional[str] = None, + test_id: Optional[str] = None, + execution_from: Optional[datetime.datetime] = None, + execution_to: Optional[datetime.datetime] = None, + status: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.TestRun"]: + """Get all test runs for the given filters. + + Get all test runs for the given filters. + + :keyword orderby: Sort on the supported fields in (field asc/desc) format. eg: executedDateTime + asc. Supported fields - executedDateTime. Default value is None. 
+ :paramtype orderby: str + :keyword search: Prefix based, case sensitive search on searchable fields - description, + executedUser. For example, to search for a test run, with description 500 VUs, + the search parameter can be 500. Default value is None. + :paramtype search: str + :keyword test_id: Unique name of an existing load test. Default value is None. + :paramtype test_id: str + :keyword execution_from: Start DateTime(RFC 3339 literal format) of test-run execution time + filter range. Default value is None. + :paramtype execution_from: ~datetime.datetime + :keyword execution_to: End DateTime(RFC 3339 literal format) of test-run execution time filter + range. Default value is None. + :paramtype execution_to: ~datetime.datetime + :keyword status: Comma separated list of test run status. Default value is None. + :paramtype status: str + :return: An iterator like instance of TestRun + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.TestRun] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models5.TestRun]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_test_runs_request( + orderby=orderby, + search=search, + test_id=test_id, + execution_from=execution_from, + execution_to=execution_to, + status=status, + maxpagesize=maxpagesize, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize(List[_microsoft_loadtestservice_models5.TestRun], deserialized.get("value", [])) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + async def stop_test_run(self, test_run_id: str, **kwargs: Any) -> _microsoft_loadtestservice_models5.TestRun: + """Stop test run by test run Id. + + Stop test run by test run Id. 
+ + :param test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_run_id: str + :return: TestRun. The TestRun is compatible with MutableMapping + :rtype: ~customizations.models.TestRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_stop_test_run_request( + test_run_id=test_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @overload + async def 
_begin_test_profile_run( + self, + test_profile_run_id: str, + body: _microsoft_loadtestservice_models5.TestProfileRun, + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: ... + @overload + async def _begin_test_profile_run( + self, test_profile_run_id: str, body: JSON, *, content_type: str = "application/merge-patch+json", **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: ... + @overload + async def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: IO[bytes], + *, + content_type: str = "application/merge-patch+json", + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: ... + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "content_type", "accept"]}, + ) + async def _begin_test_profile_run( + self, + test_profile_run_id: str, + body: Union[_microsoft_loadtestservice_models5.TestProfileRun, JSON, IO[bytes]], + **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: + """Create and start a new test profile run. + + Create and start a new test profile run with the given test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :param body: The resource instance. Is one of the following types: TestProfileRun, JSON, + IO[bytes] Required. + :type body: ~customizations.models.TestProfileRun or JSON or IO[bytes] + :return: TestProfileRun. 
The TestProfileRun is compatible with MutableMapping + :rtype: ~customizations.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_microsoft_loadtestservice_models5.TestProfileRun] = kwargs.pop("cls", None) + + content_type = content_type or "application/merge-patch+json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_load_test_run_begin_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + 
else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + ) + async def delete_test_profile_run(self, test_profile_run_id: str, **kwargs: Any) -> None: + """Delete an existing load test profile run. + + Delete an existing load test profile run by providing the test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + + _request = build_load_test_run_delete_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + ) + async def get_test_profile_run( + self, test_profile_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: + """Get test profile run details. + + Get test profile run details by test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: TestProfileRun. The TestProfileRun is compatible with MutableMapping + :rtype: ~customizations.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestProfileRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_get_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = 
pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={ + "2024-05-01-preview": [ + "api_version", + "maxpagesize", + "min_start_date_time", + "max_start_date_time", + "min_end_date_time", + "max_end_date_time", + "created_date_start_time", + "created_date_end_time", + "test_profile_run_ids", + "test_profile_ids", + "statuses", + "accept", + ] + }, + ) + def list_test_profile_runs( + self, + *, + min_start_date_time: Optional[datetime.datetime] = None, + max_start_date_time: Optional[datetime.datetime] = None, + min_end_date_time: Optional[datetime.datetime] = None, + max_end_date_time: Optional[datetime.datetime] = None, + created_date_start_time: Optional[datetime.datetime] = None, + created_date_end_time: Optional[datetime.datetime] = None, + test_profile_run_ids: Optional[List[str]] = None, + test_profile_ids: Optional[List[str]] = None, + statuses: Optional[List[str]] = None, + **kwargs: Any + ) -> AsyncIterable["_microsoft_loadtestservice_models5.TestProfileRun"]: + """List test profile runs. + + Get all test profile runs for the given filters. + + :keyword min_start_date_time: Minimum Start DateTime(RFC 3339 literal format) of the test + profile runs to filter on. Default value is None. 
+ :paramtype min_start_date_time: ~datetime.datetime + :keyword max_start_date_time: Maximum Start DateTime(RFC 3339 literal format) of the test + profile runs to filter on. Default value is None. + :paramtype max_start_date_time: ~datetime.datetime + :keyword min_end_date_time: Minimum End DateTime(RFC 3339 literal format) of the test profile + runs to filter on. Default value is None. + :paramtype min_end_date_time: ~datetime.datetime + :keyword max_end_date_time: Maximum End DateTime(RFC 3339 literal format) of the test profile + runs to filter on. Default value is None. + :paramtype max_end_date_time: ~datetime.datetime + :keyword created_date_start_time: Start DateTime(RFC 3339 literal format) of the created time + range to filter test profile runs. Default value is None. + :paramtype created_date_start_time: ~datetime.datetime + :keyword created_date_end_time: End DateTime(RFC 3339 literal format) of the created time range + to filter test profile runs. Default value is None. + :paramtype created_date_end_time: ~datetime.datetime + :keyword test_profile_run_ids: Comma separated list of IDs of the test profile runs to filter. + Default value is None. + :paramtype test_profile_run_ids: list[str] + :keyword test_profile_ids: Comma separated IDs of the test profiles which should be associated + with the test profile runs to fetch. Default value is None. + :paramtype test_profile_ids: list[str] + :keyword statuses: Comma separated list of Statuses of the test profile runs to filter. Default + value is None. 
+ :paramtype statuses: list[str] + :return: An iterator like instance of TestProfileRun + :rtype: ~azure.core.async_paging.AsyncItemPaged[~customizations.models.TestProfileRun] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + maxpagesize = kwargs.pop("maxpagesize", None) + cls: ClsType[List[_microsoft_loadtestservice_models5.TestProfileRun]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_load_test_run_list_test_profile_runs_request( + maxpagesize=maxpagesize, + min_start_date_time=min_start_date_time, + max_start_date_time=max_start_date_time, + min_end_date_time=min_end_date_time, + max_end_date_time=max_end_date_time, + created_date_start_time=created_date_start_time, + created_date_end_time=created_date_end_time, + test_profile_run_ids=test_profile_run_ids, + test_profile_ids=test_profile_ids, + statuses=statuses, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + 
path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_microsoft_loadtestservice_models5.TestProfileRun], deserialized.get("value", []) + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + @distributed_trace_async + @api_version_validation( + method_added_on="2024-05-01-preview", + params_added_on={"2024-05-01-preview": ["api_version", "test_profile_run_id", "accept"]}, + ) + async def stop_test_profile_run( + self, test_profile_run_id: str, **kwargs: Any + ) -> _microsoft_loadtestservice_models5.TestProfileRun: + """Stop test profile run. + + Stop test profile run for the given test profile run Id. + + :param test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :type test_profile_run_id: str + :return: TestProfileRun. 
The TestProfileRun is compatible with MutableMapping + :rtype: ~customizations.models.TestProfileRun + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_microsoft_loadtestservice_models5.TestProfileRun] = kwargs.pop("cls", None) + + _request = build_load_test_run_stop_test_profile_run_request( + test_profile_run_id=test_profile_run_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str"), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + if _stream: + deserialized = response.iter_bytes() + else: + deserialized = _deserialize(_microsoft_loadtestservice_models5.TestProfileRun, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_patch.py new file 
mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_operations/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_vendor.py b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_vendor.py new file mode 100644 index 000000000000..cc211a4758bb --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/aio/_vendor.py @@ -0,0 +1,34 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from abc import ABC +from typing import TYPE_CHECKING + +from ._configuration import LoadTestAdministrationClientConfiguration, LoadTestRunClientConfiguration + +if TYPE_CHECKING: + from azure.core import AsyncPipelineClient + + from .._serialization import Deserializer, Serializer + + +class LoadTestAdministrationClientMixinABC(ABC): + """DO NOT use this class. It is for internal typing use only.""" + + _client: "AsyncPipelineClient" + _config: LoadTestAdministrationClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" + + +class LoadTestRunClientMixinABC(ABC): + """DO NOT use this class. 
It is for internal typing use only.""" + + _client: "AsyncPipelineClient" + _config: LoadTestRunClientConfiguration + _serialize: "Serializer" + _deserialize: "Deserializer" diff --git a/sdk/loadtesting/azure-developer-loadtesting/customizations/py.typed b/sdk/loadtesting/azure-developer-loadtesting/customizations/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/customizations/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test.py new file mode 100644 index 000000000000..d29d34b30efd --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test.py @@ -0,0 +1,86 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_test( + test_id="12345678-1234-1234-1234-123456789012", + body={ + "autoStopCriteria": {"autoStopDisabled": True, "errorRate": 70, "errorRateTimeWindowInSeconds": 60}, + "description": "sample description", + "displayName": "Performance_LoadTest", + "engineBuiltInIdentityIds": [ + "/subscriptions/10000000-0000-0000-0000-000000000000/resourceGroups/samplerg1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sampleresourcename" + ], + "engineBuiltInIdentityType": "UserAssigned", + "environmentVariables": {"envvar1": "sampletext"}, + "keyvaultReferenceIdentityId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/sampleprovider/sampleresourcetype/sampleresourcename", + "keyvaultReferenceIdentityType": "UserAssigned", + "loadTestConfiguration": {"engineInstances": 6, "splitAllCSVs": True}, + "metricsReferenceIdentityId": "/subscriptions/10000000-0000-0000-0000-000000000000/resourceGroups/samplerg1/providers/Microsoft.ManagedIdentity/userAssignedIdentities/sampleresourcename", + "metricsReferenceIdentityType": "UserAssigned", + "passFailCriteria": { + "passFailMetrics": { + "fefd759d-7fe8-4f83-8b6d-aeebe0f491fe": { + "action": "continue", + "aggregate": "percentage", + "clientMetric": "response_time_ms", + "condition": ">", + "value": 20, + } + }, + "passFailServerMetrics": { + "fefd759d-7fe8-4f83-8b6d-aeebe0f491fe": { + "action": "continue", + "aggregation": "Average", + "condition": ">", + "metricName": "Percentage CPU", + "metricNamespace": "Microsoft.Compute/virtualMachines", + "resourceId": 
"/subscriptions/12345678-1234-1234-1234-123456789abc/resourceGroups/MyResourceGroup/providers/Microsoft.Compute/virtualMachines/MyVM", + "value": 20, + } + }, + }, + "secrets": { + "secret1": { + "type": "AKV_SECRET_URI", + "value": "https://samplevault.vault.azure.net/secrets/samplesecret/f113f91fd4c44a368049849c164db827", + } + }, + "subnetId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/Microsoft.Network/virtualNetworks/samplenetworkresource/subnets/AAAAA0A0A0", + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTest.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_app_components.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_app_components.py new file mode 100644 index 000000000000..2c4a39aa02f6 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_app_components.py @@ -0,0 +1,52 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_app_components.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_app_components( + test_id="ee843bd9-a6d4-4364-a45c-427a03c39fa7", + body={ + "components": { + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource": { + "displayName": "Performance_LoadTest_Insights", + "kind": "web", + "resourceName": "appcomponentresource", + "resourceType": "microsoft.insights/components", + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestAppComponents.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_profile.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_profile.py new file mode 100644 index 000000000000..2206f091558d --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_profile.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_profile.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_test_profile( + test_profile_id="12345678-1234-1234-1234-123456789012", + body={ + "description": "sample description", + "displayName": "Performance_TestProfile", + "targetResourceConfigurations": { + "configurations": { + "config1": {"httpConcurrency": 16, "instanceMemoryMB": 2048}, + "config2": {"httpConcurrency": 16, "instanceMemoryMB": 4096}, + }, + "kind": "FunctionsFlexConsumption", + }, + "targetResourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/Microsoft.Web/sites/myfunctionapp", + "testId": "12346-abcd-6789", + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestProfile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_app_components.py 
b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_app_components.py new file mode 100644 index 000000000000..281ddf97e9c4 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_app_components.py @@ -0,0 +1,52 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_run_app_components.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_app_components( + test_run_id="ee843bd9-a6d4-4364-a45c-427a03c39fa7", + body={ + "components": { + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource": { + "displayName": "Performance_LoadTest_Insights", + "kind": "web", + "resourceName": "appcomponentresource", + "resourceType": "microsoft.insights/components", + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestRunAppComponents.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_server_metrics_config.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_server_metrics_config.py new file mode 100644 index 000000000000..63e643ee0cec --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_run_server_metrics_config.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_run_server_metrics_config.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_server_metrics_config( + test_run_id="edc6e529-d009-4b99-b763-ca492e3a2823", + body={ + "metrics": { + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource/providers/microsoft.insights/metricdefinitions/requests/duration": { + "aggregation": "Average", + "displayDescription": "sample description", + "metricNamespace": "microsoft.insights/components", + "name": "requests/duration", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource", + "resourceType": "microsoft.insights/components", + "unit": None, + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestRunServerMetricsConfig.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_server_metrics_config.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_server_metrics_config.py new file mode 100644 index 
000000000000..558421e277fa --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/create_or_update_test_server_metrics_config.py @@ -0,0 +1,55 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python create_or_update_test_server_metrics_config.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.create_or_update_server_metrics_config( + test_id="edc6e529-d009-4b99-b763-ca492e3a2823", + body={ + "metrics": { + "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource/providers/microsoft.insights/metricdefinitions/requests/duration": { + "aggregation": "Average", + "displayDescription": "sample description", + "metricNamespace": "microsoft.insights/components", + "name": "requests/duration", + "resourceId": "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/samplerg/providers/microsoft.insights/components/appcomponentresource", + "resourceType": "microsoft.insights/components", + "unit": None, + } + } + }, + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/CreateOrUpdateTestServerMetricsConfig.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test.py new file mode 100644 index 000000000000..895ea887bb37 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python delete_test.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + client.delete_test( + test_id="12345678-1234-1234-1234-123456789012", + ) + + +# x-ms-original-file: 2024-12-01-preview/DeleteTest.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_file.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_file.py new file mode 100644 index 000000000000..24eead669e3d --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_file.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python delete_test_file.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + client.delete_test_file( + test_id="12345678-1234-1234-1234-123456789012", + file_name="app.jmx", + ) + + +# x-ms-original-file: 2024-12-01-preview/DeleteTestFile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile.py new file mode 100644 index 000000000000..e9ebe66beb17 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python delete_test_profile.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + client.delete_test_profile( + test_profile_id="12345678-1234-1234-1234-123456789012", + ) + + +# x-ms-original-file: 2024-12-01-preview/DeleteTestProfile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile_run.py new file mode 100644 index 000000000000..f033049beda1 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_profile_run.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python delete_test_profile_run.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + client.delete_test_profile_run( + test_profile_run_id="12316678-1234-1234-1234-122451189012", + ) + + +# x-ms-original-file: 2024-12-01-preview/DeleteTestProfileRun.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_run.py new file mode 100644 index 000000000000..27a552886088 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/delete_test_run.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python delete_test_run.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + client.delete_test_run( + test_run_id="12316678-1234-1234-1234-122451189012", + ) + + +# x-ms-original-file: 2024-12-01-preview/DeleteTestRun.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test.py new file mode 100644 index 000000000000..f4fa97b9b85b --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python get_test.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_test( + test_id="12345678-1234-1234-1234-123456789012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/GetTest.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_file.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_file.py new file mode 100644 index 000000000000..bf52cbe67585 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_file.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python get_test_file.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_test_file( + test_id="12345678-1234-1234-1234-123456789012", + file_name="sample.jmx", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/GetTestFile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile.py new file mode 100644 index 000000000000..8ea884a93b2b --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python get_test_profile.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_test_profile( + test_profile_id="12345678-1234-1234-1234-123456789012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/GetTestProfile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executed.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executed.py new file mode 100644 index 000000000000..10d9432726d0 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executed.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python get_test_profile_run_executed.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_test_profile_run( + test_profile_run_id="12316678-1234-1234-1234-122451189012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/GetTestProfileRun_Executed.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executing.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executing.py new file mode 100644 index 000000000000..31a09483e8e7 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_profile_run_executing.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python get_test_profile_run_executing.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_test_profile_run( + test_profile_run_id="12316678-1234-1234-1234-122451189012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/GetTestProfileRun_Executing.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run.py new file mode 100644 index 000000000000..a9dee2f405f0 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python get_test_run.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_test_run( + test_run_id="12316678-1234-1234-1234-122451189012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/GetTestRun.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run_file.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run_file.py new file mode 100644 index 000000000000..411fe77038c3 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/get_test_run_file.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python get_test_run_file.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_test_run_file( + test_run_id="12316678-1234-1234-1234-122451189012", + file_name="sample.jmx", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/GetTestRunFile.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_metric_dimension_values.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_metric_dimension_values.py new file mode 100644 index 000000000000..0893654b69b7 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_metric_dimension_values.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_metric_dimension_values.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.list_metric_dimension_values( + test_run_id="12316678-1234-1234-1234-122451189012", + name="SamplerName", + metric_name="ActiveThreads", + metric_namespace="LoadTestRunMetrics", + time_interval="2022-09-24T19:00:40Z/2022-09-25T19:28:40Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2024-12-01-preview/ListMetricDimensionValues.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_app_components.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_app_components.py new file mode 100644 index 000000000000..9b49106f0bac --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_app_components.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_app_components.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_app_components( + test_id="ee843bd9-a6d4-4364-a45c-427a03c39fa7", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/ListTestAppComponents.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_files.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_files.py new file mode 100644 index 000000000000..a8e43d484485 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_files.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_files.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.list_test_files( + test_id="12345678-1234-1234-1234-123456789012", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2024-12-01-preview/ListTestFiles.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profile_runs.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profile_runs.py new file mode 100644 index 000000000000..ace1b5bd3879 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profile_runs.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_profile_runs.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.list_test_profile_runs() + for item in response: + print(item) + + +# x-ms-original-file: 2024-12-01-preview/ListTestProfileRuns.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profiles.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profiles.py new file mode 100644 index 000000000000..9049778a085d --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_profiles.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_profiles.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.list_test_profiles() + for item in response: + print(item) + + +# x-ms-original-file: 2024-12-01-preview/ListTestProfiles.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_app_components.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_app_components.py new file mode 100644 index 000000000000..4d7e4b6ec69f --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_app_components.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_run_app_components.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_app_components( + test_run_id="ee843bd9-a6d4-4364-a45c-427a03c39fa7", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/ListTestRunAppComponents.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics.py new file mode 100644 index 000000000000..6e68909f3c2c --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_run_metrics.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.list_metrics( + test_run_id="12316678-1234-1234-1234-122451189012", + metric_name="ActiveThreads", + metric_namespace="LoadTestRunMetrics", + time_interval="2022-09-24T19:00:40Z/2022-09-25T19:28:40Z", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2024-12-01-preview/ListTestRunMetrics.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_definitions.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_definitions.py new file mode 100644 index 000000000000..95cb2a72ef07 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_definitions.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_run_metrics_definitions.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_metric_definitions( + test_run_id="12316678-1234-1234-1234-122451189012", + metric_namespace="LoadTestRunMetrics", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/ListTestRunMetricsDefinitions.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_namespaces.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_namespaces.py new file mode 100644 index 000000000000..0c5ca16bf254 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_metrics_namespaces.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_run_metrics_namespaces.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_metric_namespaces( + test_run_id="12316678-1234-1234-1234-122451189012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/ListTestRunMetricsNamespaces.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_server_metrics_config.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_server_metrics_config.py new file mode 100644 index 000000000000..defb11ea513b --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_run_server_metrics_config.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_run_server_metrics_config.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_server_metrics_config( + test_run_id="12345678-1234-1234-1234-123456789012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/ListTestRunServerMetricsConfig.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_runs.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_runs.py new file mode 100644 index 000000000000..24b5699bb152 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_runs.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_runs.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.list_test_runs() + for item in response: + print(item) + + +# x-ms-original-file: 2024-12-01-preview/ListTestRuns.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_server_metrics_config.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_server_metrics_config.py new file mode 100644 index 000000000000..13b5f3afff32 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_test_server_metrics_config.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_test_server_metrics_config.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.get_server_metrics_config( + test_id="12345678-1234-1234-1234-123456789012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/ListTestServerMetricsConfig.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_tests.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_tests.py new file mode 100644 index 000000000000..810fdaa0b97f --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/list_tests.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestAdministrationClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python list_tests.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestAdministrationClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.list_tests() + for item in response: + print(item) + + +# x-ms-original-file: 2024-12-01-preview/ListTests.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_profile_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_profile_run.py new file mode 100644 index 000000000000..a5714bd663f1 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_profile_run.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python stop_test_profile_run.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.stop_test_profile_run( + test_profile_run_id="12316678-1234-1234-1234-122451189012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/StopTestProfileRun.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_run.py new file mode 100644 index 000000000000..8f370f09a7c9 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_samples/stop_test_run.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from customizations import LoadTestRunClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-developer-loadtesting +# USAGE + python stop_test_run.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = LoadTestRunClient( + endpoint="ENDPOINT", + credential=DefaultAzureCredential(), + ) + + response = client.stop_test_run( + test_run_id="12316678-1234-1234-1234-122451189012", + ) + print(response) + + +# x-ms-original-file: 2024-12-01-preview/StopTestRun.json +if __name__ == "__main__": + main() diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/conftest.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/conftest.py new file mode 100644 index 000000000000..4350841161b5 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/conftest.py @@ -0,0 +1,56 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# For security, please avoid record sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + loadtestadministration_subscription_id = os.environ.get( + "LOADTESTADMINISTRATION_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000" + ) + loadtestadministration_tenant_id = os.environ.get( + "LOADTESTADMINISTRATION_TENANT_ID", "00000000-0000-0000-0000-000000000000" + ) + loadtestadministration_client_id = os.environ.get( + "LOADTESTADMINISTRATION_CLIENT_ID", "00000000-0000-0000-0000-000000000000" + ) + loadtestadministration_client_secret = os.environ.get( + "LOADTESTADMINISTRATION_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer( + regex=loadtestadministration_subscription_id, value="00000000-0000-0000-0000-000000000000" + ) + add_general_regex_sanitizer(regex=loadtestadministration_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=loadtestadministration_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer( + regex=loadtestadministration_client_secret, value="00000000-0000-0000-0000-000000000000" + ) + + loadtestrun_subscription_id = os.environ.get("LOADTESTRUN_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + loadtestrun_tenant_id = os.environ.get("LOADTESTRUN_TENANT_ID", "00000000-0000-0000-0000-000000000000") + loadtestrun_client_id = os.environ.get("LOADTESTRUN_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + loadtestrun_client_secret = os.environ.get("LOADTESTRUN_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + 
add_general_regex_sanitizer(regex=loadtestrun_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=loadtestrun_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=loadtestrun_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=loadtestrun_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration.py new file mode 100644 index 000000000000..db860d9d5d8c --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration.py @@ -0,0 +1,341 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import LoadTestAdministrationClientTestBase, LoadTestAdministrationPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLoadTestAdministration(LoadTestAdministrationClientTestBase): + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_create_or_update_test(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.create_or_update_test( + test_id="str", + body={ + "testId": "str", + "autoStopCriteria": {"autoStopDisabled": bool, "errorRate": 0.0, "errorRateTimeWindowInSeconds": 0}, + "baselineTestRunId": "str", + "certificate": {"name": "str", "type": "str", "value": "str"}, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "description": "str", + "displayName": "str", + "engineBuiltInIdentityIds": ["str"], + "engineBuiltInIdentityType": "str", + "environmentVariables": {"str": "str"}, + "inputArtifacts": { + "additionalFileInfo": [ + { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + } + ], + "configFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "inputArtifactsZipFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "testScriptFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "urlTestConfigFileInfo": { + "fileName": "str", 
+ "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "userPropFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + }, + "keyvaultReferenceIdentityId": "str", + "keyvaultReferenceIdentityType": "str", + "kind": "str", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "loadTestConfiguration": { + "engineInstances": 0, + "optionalLoadTestConfig": { + "duration": 0, + "endpointUrl": "str", + "maxResponseTimeInMs": 0, + "rampUpTime": 0, + "requestsPerSecond": 0, + "virtualUsers": 0, + }, + "quickStartTest": bool, + "regionalLoadTestConfig": [{"engineInstances": 0, "region": "str"}], + "splitAllCSVs": bool, + }, + "metricsReferenceIdentityId": "str", + "metricsReferenceIdentityType": "str", + "passFailCriteria": { + "passFailMetrics": { + "str": { + "action": "str", + "actualValue": 0.0, + "aggregate": "str", + "clientMetric": "str", + "condition": "str", + "requestName": "str", + "result": "str", + "value": 0.0, + } + }, + "passFailServerMetrics": { + "str": { + "aggregation": "str", + "condition": "str", + "metricName": "str", + "metricNamespace": "str", + "resourceId": "str", + "value": 0.0, + "action": "str", + "actualValue": 0.0, + "result": "str", + } + }, + }, + "publicIPDisabled": bool, + "secrets": {"str": {"type": "str", "value": "str"}}, + "subnetId": "str", + }, + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_create_or_update_app_components(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.create_or_update_app_components( + test_id="str", + body={ + "components": { + "str": { + "resourceId": "str", + "resourceName": "str", + "resourceType": "str", + "displayName": "str", + "kind": "str", + "resourceGroup": "str", + "subscriptionId": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_create_or_update_server_metrics_config(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.create_or_update_server_metrics_config( + test_id="str", + body={ + "metrics": { + "str": { + "aggregation": "str", + "metricNamespace": "str", + "name": "str", + "resourceId": "str", + "resourceType": "str", + "displayDescription": "str", + "id": "str", + "unit": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_app_components(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_app_components( + test_id="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_server_metrics_config(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_server_metrics_config( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_test(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_test( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_test_file(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_test_file( + test_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_list_test_files(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.list_test_files( + test_id="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_list_tests(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.list_tests() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_delete_test_file(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.delete_test_file( + test_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_delete_test(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.delete_test( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_create_or_update_test_profile(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.create_or_update_test_profile( + test_profile_id="str", + body={ + "testProfileId": "str", + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "description": "str", + "displayName": "str", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "targetResourceConfigurations": "target_resource_configurations", + "targetResourceId": "str", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_delete_test_profile(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.delete_test_profile( + test_profile_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_get_test_profile(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.get_test_profile( + test_profile_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy + def test_list_test_profiles(self, loadtestadministration_endpoint): + client = self.create_client(endpoint=loadtestadministration_endpoint) + response = client.list_test_profiles() + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration_async.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration_async.py new file mode 100644 index 000000000000..ee605480da90 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_administration_async.py @@ -0,0 +1,342 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import LoadTestAdministrationPreparer +from testpreparer_async import LoadTestAdministrationClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLoadTestAdministrationAsync(LoadTestAdministrationClientTestBaseAsync): + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_create_or_update_test(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.create_or_update_test( + test_id="str", + body={ + "testId": "str", + "autoStopCriteria": {"autoStopDisabled": bool, "errorRate": 0.0, "errorRateTimeWindowInSeconds": 0}, + "baselineTestRunId": "str", + "certificate": {"name": "str", "type": "str", "value": "str"}, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "description": "str", + "displayName": "str", + "engineBuiltInIdentityIds": ["str"], + "engineBuiltInIdentityType": "str", + "environmentVariables": {"str": 
"str"}, + "inputArtifacts": { + "additionalFileInfo": [ + { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + } + ], + "configFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "inputArtifactsZipFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "testScriptFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "urlTestConfigFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + "userPropFileInfo": { + "fileName": "str", + "expireDateTime": "2020-02-20 00:00:00", + "fileType": "str", + "url": "str", + "validationFailureDetails": "str", + "validationStatus": "str", + }, + }, + "keyvaultReferenceIdentityId": "str", + "keyvaultReferenceIdentityType": "str", + "kind": "str", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "loadTestConfiguration": { + "engineInstances": 0, + "optionalLoadTestConfig": { + "duration": 0, + "endpointUrl": "str", + "maxResponseTimeInMs": 0, + "rampUpTime": 0, + "requestsPerSecond": 0, + "virtualUsers": 0, + }, + "quickStartTest": bool, + "regionalLoadTestConfig": [{"engineInstances": 0, "region": "str"}], + "splitAllCSVs": bool, + }, + "metricsReferenceIdentityId": "str", + "metricsReferenceIdentityType": "str", + "passFailCriteria": { + "passFailMetrics": { + "str": { + "action": "str", + "actualValue": 0.0, + "aggregate": "str", + "clientMetric": "str", + "condition": 
"str", + "requestName": "str", + "result": "str", + "value": 0.0, + } + }, + "passFailServerMetrics": { + "str": { + "aggregation": "str", + "condition": "str", + "metricName": "str", + "metricNamespace": "str", + "resourceId": "str", + "value": 0.0, + "action": "str", + "actualValue": 0.0, + "result": "str", + } + }, + }, + "publicIPDisabled": bool, + "secrets": {"str": {"type": "str", "value": "str"}}, + "subnetId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_create_or_update_app_components(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.create_or_update_app_components( + test_id="str", + body={ + "components": { + "str": { + "resourceId": "str", + "resourceName": "str", + "resourceType": "str", + "displayName": "str", + "kind": "str", + "resourceGroup": "str", + "subscriptionId": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_create_or_update_server_metrics_config(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.create_or_update_server_metrics_config( + test_id="str", + body={ + "metrics": { + "str": { + "aggregation": "str", + "metricNamespace": "str", + "name": "str", + "resourceId": "str", + "resourceType": "str", + "displayDescription": "str", + "id": "str", + "unit": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_app_components(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_app_components( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_server_metrics_config(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_server_metrics_config( + test_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_test(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_test( + test_id="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_test_file(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_test_file( + test_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_test_files(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = client.list_test_files( + test_id="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_tests(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = client.list_tests() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_delete_test_file(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.delete_test_file( + test_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_delete_test(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.delete_test( + test_id="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_create_or_update_test_profile(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.create_or_update_test_profile( + test_profile_id="str", + body={ + "testProfileId": "str", + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "description": "str", + "displayName": "str", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "targetResourceConfigurations": "target_resource_configurations", + "targetResourceId": "str", + "testId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_delete_test_profile(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.delete_test_profile( + test_profile_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_get_test_profile(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = await client.get_test_profile( + test_profile_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestAdministrationPreparer() + @recorded_by_proxy_async + async def test_list_test_profiles(self, loadtestadministration_endpoint): + client = self.create_async_client(endpoint=loadtestadministration_endpoint) + response = client.list_test_profiles() + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run.py new file mode 100644 index 000000000000..895274de2057 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run.py @@ -0,0 +1,242 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils import recorded_by_proxy +from testpreparer import LoadTestRunClientTestBase, LoadTestRunPreparer + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLoadTestRun(LoadTestRunClientTestBase): + @LoadTestRunPreparer() + @recorded_by_proxy + def test_create_or_update_app_components(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.create_or_update_app_components( + test_run_id="str", + body={ + "components": { + "str": { + "resourceId": "str", + "resourceName": "str", + "resourceType": "str", + "displayName": "str", + "kind": "str", + "resourceGroup": "str", + "subscriptionId": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testRunId": "str", + }, + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestRunPreparer() + @recorded_by_proxy + def test_create_or_update_server_metrics_config(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.create_or_update_server_metrics_config( + test_run_id="str", + body={ + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "metrics": { + "str": { + "aggregation": "str", + "metricNamespace": "str", + "name": "str", + "resourceId": "str", + "resourceType": "str", + "displayDescription": "str", + "id": "str", + "unit": "str", + } + }, + "testRunId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_delete_test_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.delete_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_app_components(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_app_components( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_server_metrics_config(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_server_metrics_config( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_test_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_test_run_file(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_test_run_file( + test_run_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_list_metric_dimension_values(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.list_metric_dimension_values( + test_run_id="str", + name="str", + metric_name="str", + metric_namespace="str", + time_interval="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_metric_definitions(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_metric_definitions( + test_run_id="str", + metric_namespace="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_metric_namespaces(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_metric_namespaces( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_list_metrics(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.list_metrics( + test_run_id="str", + metric_name="str", + metric_namespace="str", + time_interval="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @LoadTestRunPreparer() + @recorded_by_proxy + def test_list_test_runs(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.list_test_runs() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_stop_test_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.stop_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_delete_test_profile_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.delete_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_get_test_profile_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.get_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_list_test_profile_runs(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.list_test_profile_runs() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy + def test_stop_test_profile_run(self, loadtestrun_endpoint): + client = self.create_client(endpoint=loadtestrun_endpoint) + response = client.stop_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run_async.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run_async.py new file mode 100644 index 000000000000..963c78291566 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/test_load_test_run_async.py @@ -0,0 +1,243 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from devtools_testutils.aio import recorded_by_proxy_async +from testpreparer import LoadTestRunPreparer +from testpreparer_async import LoadTestRunClientTestBaseAsync + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestLoadTestRunAsync(LoadTestRunClientTestBaseAsync): + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_create_or_update_app_components(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.create_or_update_app_components( + test_run_id="str", + body={ + "components": { + "str": { + "resourceId": "str", + "resourceName": "str", + "resourceType": "str", + "displayName": "str", + "kind": "str", + "resourceGroup": "str", + "subscriptionId": "str", + } + }, + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "testRunId": "str", + }, + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_create_or_update_server_metrics_config(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.create_or_update_server_metrics_config( + test_run_id="str", + body={ + "createdBy": "str", + "createdDateTime": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedDateTime": "2020-02-20 00:00:00", + "metrics": { + "str": { + "aggregation": "str", + "metricNamespace": "str", + "name": "str", + "resourceId": "str", + "resourceType": "str", + "displayDescription": "str", + "id": "str", + "unit": "str", + } + }, + "testRunId": "str", + }, + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_delete_test_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.delete_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_app_components(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_app_components( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_server_metrics_config(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_server_metrics_config( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_test_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_test_run_file(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_test_run_file( + test_run_id="str", + file_name="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_list_metric_dimension_values(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = client.list_metric_dimension_values( + test_run_id="str", + name="str", + metric_name="str", + metric_namespace="str", + time_interval="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_metric_definitions(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_metric_definitions( + test_run_id="str", + metric_namespace="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_metric_namespaces(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_metric_namespaces( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_list_metrics(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = client.list_metrics( + test_run_id="str", + metric_name="str", + metric_namespace="str", + time_interval="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_list_test_runs(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = client.list_test_runs() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_stop_test_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.stop_test_run( + test_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_delete_test_profile_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.delete_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_get_test_profile_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.get_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_list_test_profile_runs(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = client.list_test_profile_runs() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @LoadTestRunPreparer() + @recorded_by_proxy_async + async def test_stop_test_profile_run(self, loadtestrun_endpoint): + client = self.create_async_client(endpoint=loadtestrun_endpoint) + response = await client.stop_test_profile_run( + test_profile_run_id="str", + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer.py new file mode 100644 index 000000000000..a3c665485dac --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from customizations import LoadTestAdministrationClient, LoadTestRunClient +from devtools_testutils import AzureRecordedTestCase, PowerShellPreparer +import functools + + +class LoadTestAdministrationClientTestBase(AzureRecordedTestCase): + + def create_client(self, endpoint): + credential = self.get_credential(LoadTestAdministrationClient) + return self.create_client_from_credential( + LoadTestAdministrationClient, + credential=credential, + endpoint=endpoint, + ) + + +LoadTestAdministrationPreparer = functools.partial( + PowerShellPreparer, + "loadtestadministration", + loadtestadministration_endpoint="https://fake_loadtestadministration_endpoint.com", +) + + +class LoadTestRunClientTestBase(AzureRecordedTestCase): + + def create_client(self, endpoint): + credential = self.get_credential(LoadTestRunClient) + return self.create_client_from_credential( + LoadTestRunClient, + credential=credential, + endpoint=endpoint, + ) + + +LoadTestRunPreparer = functools.partial( + PowerShellPreparer, "loadtestrun", loadtestrun_endpoint="https://fake_loadtestrun_endpoint.com" +) diff --git a/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer_async.py b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer_async.py new file mode 100644 index 000000000000..b02b742c2a01 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/generated_tests/testpreparer_async.py @@ -0,0 +1,31 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from customizations.aio import LoadTestAdministrationClient, LoadTestRunClient +from devtools_testutils import AzureRecordedTestCase + + +class LoadTestAdministrationClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(LoadTestAdministrationClient, is_async=True) + return self.create_client_from_credential( + LoadTestAdministrationClient, + credential=credential, + endpoint=endpoint, + ) + + +class LoadTestRunClientTestBaseAsync(AzureRecordedTestCase): + + def create_async_client(self, endpoint): + credential = self.get_credential(LoadTestRunClient, is_async=True) + return self.create_client_from_credential( + LoadTestRunClient, + credential=credential, + endpoint=endpoint, + ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/__init__.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/__init__.py new file mode 100644 index 000000000000..6e644e25a518 --- /dev/null +++ 
b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/__init__.py @@ -0,0 +1,156 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + AppComponent, + ArtifactsContainerInfo, + AutoStopCriteria, + CertificateMetadata, + DimensionFilter, + DimensionValue, + ErrorDetails, + FunctionFlexConsumptionResourceConfiguration, + FunctionFlexConsumptionTargetResourceConfigurations, + LoadTestConfiguration, + MetricAvailability, + MetricDefinition, + MetricDefinitionCollection, + MetricNamespace, + MetricNamespaceCollection, + MetricRequestPayload, + MetricValue, + NameAndDescription, + OptionalLoadTestConfiguration, + PassFailCriteria, + PassFailMetric, + PassFailServerMetric, + RegionalConfiguration, + ResourceMetric, + Secret, + TargetResourceConfigurations, + Test, + TestAppComponents, + TestFileInfo, + TestInputArtifacts, + TestProfile, + TestProfileRun, + TestProfileRunRecommendation, + TestRun, + TestRunAppComponents, + TestRunArtifacts, + TestRunDetail, + TestRunFileInfo, + TestRunInputArtifacts, + TestRunOutputArtifacts, + TestRunServerMetricsConfiguration, + TestRunStatistics, + TestServerMetricsConfiguration, + TimeSeriesElement, +) + +from ._enums import ( # type: ignore + Aggregation, + CertificateType, + CreatedByType, + FileType, + FileValidationStatus, + ManagedIdentityType, + MetricUnit, + PFMetrics, + PassFailAction, 
+ PassFailAggregationFunction, + PassFailResult, + PassFailTestResult, + RecommendationCategory, + RequestDataLevel, + ResourceKind, + SecretType, + TestKind, + TestProfileRunStatus, + TestRunStatus, + TimeGrain, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AppComponent", + "ArtifactsContainerInfo", + "AutoStopCriteria", + "CertificateMetadata", + "DimensionFilter", + "DimensionValue", + "ErrorDetails", + "FunctionFlexConsumptionResourceConfiguration", + "FunctionFlexConsumptionTargetResourceConfigurations", + "LoadTestConfiguration", + "MetricAvailability", + "MetricDefinition", + "MetricDefinitionCollection", + "MetricNamespace", + "MetricNamespaceCollection", + "MetricRequestPayload", + "MetricValue", + "NameAndDescription", + "OptionalLoadTestConfiguration", + "PassFailCriteria", + "PassFailMetric", + "PassFailServerMetric", + "RegionalConfiguration", + "ResourceMetric", + "Secret", + "TargetResourceConfigurations", + "Test", + "TestAppComponents", + "TestFileInfo", + "TestInputArtifacts", + "TestProfile", + "TestProfileRun", + "TestProfileRunRecommendation", + "TestRun", + "TestRunAppComponents", + "TestRunArtifacts", + "TestRunDetail", + "TestRunFileInfo", + "TestRunInputArtifacts", + "TestRunOutputArtifacts", + "TestRunServerMetricsConfiguration", + "TestRunStatistics", + "TestServerMetricsConfiguration", + "TimeSeriesElement", + "Aggregation", + "CertificateType", + "CreatedByType", + "FileType", + "FileValidationStatus", + "ManagedIdentityType", + "MetricUnit", + "PFMetrics", + "PassFailAction", + "PassFailAggregationFunction", + "PassFailResult", + "PassFailTestResult", + "RecommendationCategory", + "RequestDataLevel", + "ResourceKind", + "SecretType", + "TestKind", + "TestProfileRunStatus", + "TestRunStatus", + "TimeGrain", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git 
a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_enums.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_enums.py new file mode 100644 index 000000000000..f6ecc0080712 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_enums.py @@ -0,0 +1,317 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class Aggregation(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Aggregation type.""" + + AVERAGE = "Average" + """Average value.""" + COUNT = "Count" + """Total count.""" + NONE = "None" + """Aggregation will be average in this case.""" + TOTAL = "Total" + """Total sum.""" + PERCENTILE75 = "Percentile75" + """75th percentile.""" + PERCENTILE90 = "Percentile90" + """90th percentile.""" + PERCENTILE95 = "Percentile95" + """95th percentile.""" + PERCENTILE96 = "Percentile96" + """96th percentile.""" + PERCENTILE97 = "Percentile97" + """97th percentile.""" + PERCENTILE98 = "Percentile98" + """98th percentile.""" + PERCENTILE99 = "Percentile99" + """99th percentile.""" + PERCENTILE999 = "Percentile999" + """99.9th percentile.""" + PERCENTILE9999 = "Percentile9999" + """99.99th percentile.""" + + +class CertificateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Types of certificates supported.""" + + KEY_VAULT_CERTIFICATE_URI = "AKV_CERT_URI" + """If the certificate is stored in an Azure Key Vault.""" + + +class CreatedByType(str, Enum, 
metaclass=CaseInsensitiveEnumMeta): + """The type of the entity that created the test run. (E.x. User, ScheduleTrigger, etc).""" + + USER = "User" + """Entity was created by a user.""" + SCHEDULED_TRIGGER = "ScheduledTrigger" + """Entity was created by a scheduled trigger.""" + + +class FileType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Types of file supported.""" + + JMX_FILE = "JMX_FILE" + """If the file is a JMX script.""" + USER_PROPERTIES = "USER_PROPERTIES" + """If the file is a user properties file.""" + ADDITIONAL_ARTIFACTS = "ADDITIONAL_ARTIFACTS" + """If the file is not among any of the other supported file types.""" + ZIPPED_ARTIFACTS = "ZIPPED_ARTIFACTS" + """If the file is a compressed archive containing a collection of various artifacts or resources.""" + URL_TEST_CONFIG = "URL_TEST_CONFIG" + """If the file is a JSON config file to define the requests for a URL test.""" + TEST_SCRIPT = "TEST_SCRIPT" + """If the file is a test script.""" + + +class FileValidationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """File status.""" + + NOT_VALIDATED = "NOT_VALIDATED" + """File is not validated.""" + VALIDATION_SUCCESS = "VALIDATION_SUCCESS" + """File is validated.""" + VALIDATION_FAILURE = "VALIDATION_FAILURE" + """File validation is failed.""" + VALIDATION_INITIATED = "VALIDATION_INITIATED" + """File validation is in progress.""" + VALIDATION_NOT_REQUIRED = "VALIDATION_NOT_REQUIRED" + """Validation is not required.""" + + +class ManagedIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Managed identity type.""" + + SYSTEM_ASSIGNED = "SystemAssigned" + """System-assigned managed identity.""" + USER_ASSIGNED = "UserAssigned" + """User-assigned managed identity.""" + + +class MetricUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Metric unit.""" + + NOT_SPECIFIED = "NotSpecified" + """No unit specified.""" + PERCENT = "Percent" + """Percentage.""" + COUNT = "Count" + """Value count.""" + SECONDS = "Seconds" + 
"""Seconds.""" + MILLISECONDS = "Milliseconds" + """Milliseconds""" + BYTES = "Bytes" + """Bytes""" + BYTES_PER_SECOND = "BytesPerSecond" + """Bytes per second""" + COUNT_PER_SECOND = "CountPerSecond" + """Count per second""" + + +class PassFailAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Action to take on failure of pass/fail criteria.""" + + CONTINUE = "continue" + """Test will continue to run even if pass fail metric criteria metric gets failed.""" + STOP = "stop" + """Test run will stop if pass fail criteria metric is not passed.""" + + +class PassFailAggregationFunction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Aggregation functions for pass/fail criteria.""" + + COUNT = "count" + """Criteria applies for count value.""" + PERCENTAGE = "percentage" + """Criteria applies for given percentage value.""" + AVERAGE = "avg" + """Criteria applies for avg value.""" + PERCENTILE50 = "p50" + """Criteria applies for 50th percentile value.""" + PERCENTILE75 = "p75" + """Criteria applies for 75th percentile value.""" + PERCENTILE90 = "p90" + """Criteria applies for 90th percentile value.""" + PERCENTILE95 = "p95" + """Criteria applies for 95th percentile value.""" + PERCENTILE96 = "p96" + """Criteria applies for 96th percentile value.""" + PERCENTILE97 = "p97" + """Criteria applies for 97th percentile value.""" + PERCENTILE98 = "p98" + """Criteria applies for 98th percentile value.""" + PERCENTILE99 = "p99" + """Criteria applies for 99th percentile value.""" + PERCENTILE999 = "p99.9" + """Criteria applies for 99.9th percentile value.""" + PERCENTILE9999 = "p99.99" + """Criteria applies for 99.99th percentile value.""" + MINIMUM = "min" + """Criteria applies for minimum value.""" + MAXIMUM = "max" + """Criteria applies for maximum value.""" + + +class PassFailResult(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Pass/fail criteria result.""" + + PASSED = "passed" + """Given pass fail criteria metric has passed.""" + UNDETERMINED = "undetermined" 
+ """Given pass fail criteria metric couldn't determine.""" + FAILED = "failed" + """Given pass fail criteria metric has failed.""" + + +class PassFailTestResult(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Test result based on pass/fail criteria.""" + + PASSED = "PASSED" + """Pass/fail criteria has passed.""" + NOT_APPLICABLE = "NOT_APPLICABLE" + """Pass/fail criteria is not applicable.""" + FAILED = "FAILED" + """Pass/fail criteria has failed.""" + + +class PFMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Metrics for pass/fail criteria.""" + + RESPONSE_TIME_IN_MILLISECONDS = "response_time_ms" + """Pass fail criteria for response time metric in milliseconds.""" + LATENCY = "latency" + """Pass fail criteria for latency metric in milliseconds.""" + ERROR = "error" + """Pass fail criteria for error metric.""" + REQUESTS = "requests" + """Pass fail criteria for total requests.""" + REQUESTS_PER_SECOND = "requests_per_sec" + """Pass fail criteria for request per second.""" + + +class RecommendationCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Category of Recommendation.""" + + THROUGHPUT_OPTIMIZED = "ThroughputOptimized" + """The recommendation for this category optimizes the throughput/RPS (Requests per Second) of the + app.""" + COST_OPTIMIZED = "CostOptimized" + """The recommendation for this category optimizes the cost of the app.""" + + +class RequestDataLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Request data collection level for test run.""" + + NONE = "NONE" + """No request data will be collected""" + ERRORS = "ERRORS" + """Request data will be collected in case of failed requests""" + + +class ResourceKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Kind of the resource on which test profile is created.""" + + FUNCTIONS_FLEX_CONSUMPTION = "FunctionsFlexConsumption" + """Resource is a Azure FunctionApp on Flex Consumption Plan.""" + + +class SecretType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + 
"""Types of secrets supported.""" + + KEY_VAULT_SECRET_URI = "AKV_SECRET_URI" + """If the secret is stored in an Azure Key Vault.""" + SECRET_VALUE = "SECRET_VALUE" + """If the secret value provided as plain text.""" + + +class TestKind(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Test kind.""" + + URL = "URL" + """URL Test""" + JMX = "JMX" + """JMX Test""" + LOCUST = "Locust" + """Locust Test""" + + +class TestProfileRunStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Test profile run status.""" + + ACCEPTED = "ACCEPTED" + """Test profile run request is accepted.""" + NOT_STARTED = "NOTSTARTED" + """Test profile run is not yet started.""" + EXECUTING = "EXECUTING" + """Test profile run has started executing.""" + DONE = "DONE" + """Test profile run has completed successfully.""" + CANCELLING = "CANCELLING" + """Test profile run is being cancelled.""" + CANCELLED = "CANCELLED" + """Test profile run is cancelled.""" + FAILED = "FAILED" + """Test profile run has failed.""" + + +class TestRunStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Test run status.""" + + ACCEPTED = "ACCEPTED" + """Test run request is accepted.""" + NOT_STARTED = "NOTSTARTED" + """Test run is not yet started.""" + PROVISIONING = "PROVISIONING" + """Test run is provisioning.""" + PROVISIONED = "PROVISIONED" + """Test run is provisioned.""" + CONFIGURING = "CONFIGURING" + """Test run is getting configured.""" + CONFIGURED = "CONFIGURED" + """Test run configuration is done.""" + EXECUTING = "EXECUTING" + """Test run has started executing.""" + EXECUTED = "EXECUTED" + """Test run execution is completed.""" + DEPROVISIONING = "DEPROVISIONING" + """Test run is getting deprovisioned.""" + DEPROVISIONED = "DEPROVISIONED" + """Test run is deprovisioned.""" + DONE = "DONE" + """Test run is completed.""" + CANCELLING = "CANCELLING" + """Test run is being cancelled.""" + CANCELLED = "CANCELLED" + """Test run request is cancelled.""" + FAILED = "FAILED" + """Test run request is 
failed.""" + VALIDATION_SUCCESS = "VALIDATION_SUCCESS" + """Test run JMX file is validated.""" + VALIDATION_FAILURE = "VALIDATION_FAILURE" + """Test run JMX file validation is failed.""" + + +class TimeGrain(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Time Grain.""" + + PT5S = "PT5S" + """5 seconds, available only if test run duration is less than 10 minutes.""" + PT10S = "PT10S" + """10 seconds, available only if test run duration is less than 10 minutes.""" + PT1M = "PT1M" + """1 minute""" + PT5M = "PT5M" + """5 minutes, available only if test run duration is greater than 1 minute.""" + PT1H = "PT1H" + """1 hour, available only if test run duration is greater than 1 minute.""" diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_models.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_models.py new file mode 100644 index 000000000000..6236ad805eab --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_models.py @@ -0,0 +1,2599 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +import datetime +from typing import Any, Dict, List, Literal, Mapping, Optional, TYPE_CHECKING, Union, overload + +from ....customizations import _model_base +from ....customizations._model_base import rest_discriminator, rest_field +from ._enums import ResourceKind + +if TYPE_CHECKING: + from .. 
import models as _models + + +class AppComponent(_model_base.Model): + """An Azure resource object (Refer azure generic resource model + :`https://learn.microsoft.com/en-us/rest/api/resources/resources/get-by-id#genericresource + `_). + + :ivar resource_id: fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + Required. + :vartype resource_id: str + :ivar resource_name: Azure resource name, required while creating the app component. Required. + :vartype resource_name: str + :ivar resource_type: Azure resource type, required while creating the app component. Required. + :vartype resource_type: str + :ivar display_name: Azure resource display name. + :vartype display_name: str + :ivar resource_group: Resource group name of the Azure resource. + :vartype resource_group: str + :ivar subscription_id: Subscription Id of the Azure resource. + :vartype subscription_id: str + :ivar kind: Kind of Azure resource type. + :vartype kind: str + """ + + resource_id: str = rest_field(name="resourceId", visibility=["read"]) + """fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}. + Required.""" + resource_name: str = rest_field(name="resourceName", visibility=["read", "create", "update", "delete", "query"]) + """Azure resource name, required while creating the app component. Required.""" + resource_type: str = rest_field(name="resourceType", visibility=["read", "create", "update", "delete", "query"]) + """Azure resource type, required while creating the app component. 
Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Azure resource display name.""" + resource_group: Optional[str] = rest_field(name="resourceGroup", visibility=["read"]) + """Resource group name of the Azure resource.""" + subscription_id: Optional[str] = rest_field(name="subscriptionId", visibility=["read"]) + """Subscription Id of the Azure resource.""" + kind: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Kind of Azure resource type.""" + + @overload + def __init__( + self, + *, + resource_name: str, + resource_type: str, + display_name: Optional[str] = None, + kind: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ArtifactsContainerInfo(_model_base.Model): + """Artifacts container info. + + :ivar url: This is a SAS URI to an Azure Storage Container that contains the test run + artifacts. + :vartype url: str + :ivar expire_date_time: Expiry time of the container (RFC 3339 literal format). + :vartype expire_date_time: ~datetime.datetime + """ + + url: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """This is a SAS URI to an Azure Storage Container that contains the test run artifacts.""" + expire_date_time: Optional[datetime.datetime] = rest_field( + name="expireDateTime", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """Expiry time of the container (RFC 3339 literal format).""" + + @overload + def __init__( + self, + *, + url: Optional[str] = None, + expire_date_time: Optional[datetime.datetime] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class AutoStopCriteria(_model_base.Model): + """Auto stop criteria for a test. This will automatically stop a load test if the error percentage + is high for a certain time window. + + :ivar auto_stop_disabled: Whether auto-stop should be disabled. The default value is false. + :vartype auto_stop_disabled: bool + :ivar error_rate: Threshold percentage of errors on which test run should be automatically + stopped. Allowed values are in range of 0.0-100.0. + :vartype error_rate: float + :ivar error_rate_time_window_in_seconds: Time window during which the error percentage should + be evaluated in seconds. + :vartype error_rate_time_window_in_seconds: int + """ + + auto_stop_disabled: Optional[bool] = rest_field( + name="autoStopDisabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Whether auto-stop should be disabled. The default value is false.""" + error_rate: Optional[float] = rest_field( + name="errorRate", visibility=["read", "create", "update", "delete", "query"] + ) + """Threshold percentage of errors on which test run should be automatically stopped. Allowed + values are in range of 0.0-100.0.""" + error_rate_time_window_in_seconds: Optional[int] = rest_field( + name="errorRateTimeWindowInSeconds", visibility=["read", "create", "update", "delete", "query"] + ) + """Time window during which the error percentage should be evaluated in seconds.""" + + @overload + def __init__( + self, + *, + auto_stop_disabled: Optional[bool] = None, + error_rate: Optional[float] = None, + error_rate_time_window_in_seconds: Optional[int] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CertificateMetadata(_model_base.Model): + """Certificates metadata. + + :ivar value: The value of the certificate for respective type. + :vartype value: str + :ivar type: Type of certificate. "AKV_CERT_URI" + :vartype type: str or ~customizations.models.CertificateType + :ivar name: Name of the certificate. + :vartype name: str + """ + + value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value of the certificate for respective type.""" + type: Optional[Union[str, "_models.CertificateType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of certificate. \"AKV_CERT_URI\"""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Name of the certificate.""" + + @overload + def __init__( + self, + *, + value: Optional[str] = None, + type: Optional[Union[str, "_models.CertificateType"]] = None, + name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DimensionFilter(_model_base.Model): + """Dimension name and values to filter. + + :ivar name: The dimension name. + :vartype name: str + :ivar values_property: The dimension values. Maximum values can be 20. 
+ :vartype values_property: list[str] + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The dimension name.""" + values_property: Optional[List[str]] = rest_field( + name="values", visibility=["read", "create", "update", "delete", "query"] + ) + """The dimension values. Maximum values can be 20.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + values_property: Optional[List[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class DimensionValue(_model_base.Model): + """Represents a metric dimension value. + + :ivar name: The name of the dimension. + :vartype name: str + :ivar value: The value of the dimension. + :vartype value: str + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the dimension.""" + value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value of the dimension.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + value: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorDetails(_model_base.Model): + """Error details if there is any failure in load test run. + + :ivar message: Error details in case test run was not successfully run. 
+ :vartype message: str + """ + + message: Optional[str] = rest_field(visibility=["read"]) + """Error details in case test run was not successfully run.""" + + +class FunctionFlexConsumptionResourceConfiguration(_model_base.Model): # pylint: disable=name-too-long + """Resource configuration instance for a Flex Consumption based Azure Function App. + + :ivar instance_memory_mb: Memory size of the instance. Supported values are 2048, 4096. + Required. + :vartype instance_memory_mb: int + :ivar http_concurrency: HTTP Concurrency for the function app. + :vartype http_concurrency: int + """ + + instance_memory_mb: int = rest_field( + name="instanceMemoryMB", visibility=["read", "create", "update", "delete", "query"] + ) + """Memory size of the instance. Supported values are 2048, 4096. Required.""" + http_concurrency: Optional[int] = rest_field( + name="httpConcurrency", visibility=["read", "create", "update", "delete", "query"] + ) + """HTTP Concurrency for the function app.""" + + @overload + def __init__( + self, + *, + instance_memory_mb: int, + http_concurrency: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TargetResourceConfigurations(_model_base.Model): + """Configurations of a target resource. This varies with the kind of resource. + + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FunctionFlexConsumptionTargetResourceConfigurations + + :ivar kind: Kind of the resource for which the configurations apply. Required. 
+ "FunctionsFlexConsumption" + :vartype kind: str or ~customizations.models.ResourceKind + """ + + __mapping__: Dict[str, _model_base.Model] = {} + kind: str = rest_discriminator(name="kind", visibility=["read", "create"]) + """Kind of the resource for which the configurations apply. Required. \"FunctionsFlexConsumption\"""" + + @overload + def __init__( + self, + *, + kind: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FunctionFlexConsumptionTargetResourceConfigurations( + TargetResourceConfigurations, discriminator="FunctionsFlexConsumption" +): # pylint: disable=name-too-long + """Configurations for a Function App using Flex Consumption Plan. + + :ivar kind: The kind value to use when providing configuration. + This should typically be not changed from its value. Required. Resource is a Azure FunctionApp + on Flex Consumption Plan. + :vartype kind: str or ~customizations.models.FUNCTIONS_FLEX_CONSUMPTION + :ivar configurations: A map of configurations for a Function app using Flex Consumption Plan. + :vartype configurations: dict[str, + ~customizations.models.FunctionFlexConsumptionResourceConfiguration] + """ + + kind: Literal[ResourceKind.FUNCTIONS_FLEX_CONSUMPTION] = rest_discriminator(name="kind", visibility=["read", "create", "update", "delete", "query"]) # type: ignore + """The kind value to use when providing configuration. + This should typically be not changed from its value. Required. 
Resource is a Azure FunctionApp + on Flex Consumption Plan.""" + configurations: Optional[Dict[str, "_models.FunctionFlexConsumptionResourceConfiguration"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """A map of configurations for a Function app using Flex Consumption Plan.""" + + @overload + def __init__( + self, + *, + configurations: Optional[Dict[str, "_models.FunctionFlexConsumptionResourceConfiguration"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, kind=ResourceKind.FUNCTIONS_FLEX_CONSUMPTION, **kwargs) + + +class LoadTestConfiguration(_model_base.Model): + """Configurations for the load test. + + :ivar engine_instances: The number of engine instances to execute load test. Supported values + are in range of 1-400. Required for creating a new test. + :vartype engine_instances: int + :ivar split_all_csvs: If false, Azure Load Testing copies and processes your input files + unmodified + across all test engine instances. If true, Azure Load Testing splits the CSV + input data evenly across all engine instances. If you provide multiple CSV + files, each file will be split evenly. + :vartype split_all_csvs: bool + :ivar quick_start_test: If true, optionalLoadTestConfig is required and JMX script for the load + test is + not required to upload. + :vartype quick_start_test: bool + :ivar optional_load_test_config: Configuration for quick load test. + :vartype optional_load_test_config: ~customizations.models.OptionalLoadTestConfiguration + :ivar regional_load_test_config: Region distribution configuration for the load test. 
+ :vartype regional_load_test_config: list[~customizations.models.RegionalConfiguration] + """ + + engine_instances: Optional[int] = rest_field( + name="engineInstances", visibility=["read", "create", "update", "delete", "query"] + ) + """The number of engine instances to execute load test. Supported values are in range of 1-400. + Required for creating a new test.""" + split_all_csvs: Optional[bool] = rest_field( + name="splitAllCSVs", visibility=["read", "create", "update", "delete", "query"] + ) + """If false, Azure Load Testing copies and processes your input files unmodified + across all test engine instances. If true, Azure Load Testing splits the CSV + input data evenly across all engine instances. If you provide multiple CSV + files, each file will be split evenly.""" + quick_start_test: Optional[bool] = rest_field( + name="quickStartTest", visibility=["read", "create", "update", "delete", "query"] + ) + """If true, optionalLoadTestConfig is required and JMX script for the load test is + not required to upload.""" + optional_load_test_config: Optional["_models.OptionalLoadTestConfiguration"] = rest_field( + name="optionalLoadTestConfig", visibility=["read", "create", "update", "delete", "query"] + ) + """Configuration for quick load test.""" + regional_load_test_config: Optional[List["_models.RegionalConfiguration"]] = rest_field( + name="regionalLoadTestConfig", visibility=["read", "create", "update", "delete", "query"] + ) + """Region distribution configuration for the load test.""" + + @overload + def __init__( + self, + *, + engine_instances: Optional[int] = None, + split_all_csvs: Optional[bool] = None, + quick_start_test: Optional[bool] = None, + optional_load_test_config: Optional["_models.OptionalLoadTestConfiguration"] = None, + regional_load_test_config: Optional[List["_models.RegionalConfiguration"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricAvailability(_model_base.Model): + """Metric availability specifies the time grain (aggregation interval or frequency). + + :ivar time_grain: The time grain specifies the aggregation interval for the metric. Expressed + as + a duration 'PT1M', 'PT1H', etc. Known values are: "PT5S", "PT10S", "PT1M", "PT5M", and "PT1H". + :vartype time_grain: str or ~customizations.models.TimeGrain + """ + + time_grain: Optional[Union[str, "_models.TimeGrain"]] = rest_field( + name="timeGrain", visibility=["read", "create", "update", "delete", "query"] + ) + """The time grain specifies the aggregation interval for the metric. Expressed as + a duration 'PT1M', 'PT1H', etc. Known values are: \"PT5S\", \"PT10S\", \"PT1M\", \"PT5M\", and + \"PT1H\".""" + + @overload + def __init__( + self, + *, + time_grain: Optional[Union[str, "_models.TimeGrain"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricDefinition(_model_base.Model): + """Metric definition. + + :ivar dimensions: List of dimensions. + :vartype dimensions: list[~customizations.models.NameAndDescription] + :ivar description: The metric description. + :vartype description: str + :ivar name: The metric name. + :vartype name: str + :ivar namespace: The namespace the metric belongs to. + :vartype namespace: str + :ivar primary_aggregation_type: The primary aggregation type value defining how to use the + values for display. 
Known values are: "Average", "Count", "None", "Total", "Percentile75", + "Percentile90", "Percentile95", "Percentile96", "Percentile97", "Percentile98", "Percentile99", + "Percentile999", and "Percentile9999". + :vartype primary_aggregation_type: str or ~customizations.models.Aggregation + :ivar supported_aggregation_types: The collection of what all aggregation types are supported. + :vartype supported_aggregation_types: list[str] + :ivar unit: The unit of the metric. Known values are: "NotSpecified", "Percent", "Count", + "Seconds", "Milliseconds", "Bytes", "BytesPerSecond", and "CountPerSecond". + :vartype unit: str or ~customizations.models.MetricUnit + :ivar metric_availabilities: Metric availability specifies the time grain (aggregation interval + or + frequency). + :vartype metric_availabilities: list[~customizations.models.MetricAvailability] + """ + + dimensions: Optional[List["_models.NameAndDescription"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """List of dimensions.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The metric description.""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The metric name.""" + namespace: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The namespace the metric belongs to.""" + primary_aggregation_type: Optional[Union[str, "_models.Aggregation"]] = rest_field( + name="primaryAggregationType", visibility=["read", "create", "update", "delete", "query"] + ) + """The primary aggregation type value defining how to use the values for display. 
Known values + are: \"Average\", \"Count\", \"None\", \"Total\", \"Percentile75\", \"Percentile90\", + \"Percentile95\", \"Percentile96\", \"Percentile97\", \"Percentile98\", \"Percentile99\", + \"Percentile999\", and \"Percentile9999\".""" + supported_aggregation_types: Optional[List[str]] = rest_field( + name="supportedAggregationTypes", visibility=["read", "create", "update", "delete", "query"] + ) + """The collection of what all aggregation types are supported.""" + unit: Optional[Union[str, "_models.MetricUnit"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The unit of the metric. Known values are: \"NotSpecified\", \"Percent\", \"Count\", + \"Seconds\", \"Milliseconds\", \"Bytes\", \"BytesPerSecond\", and \"CountPerSecond\".""" + metric_availabilities: Optional[List["_models.MetricAvailability"]] = rest_field( + name="metricAvailabilities", visibility=["read", "create", "update", "delete", "query"] + ) + """Metric availability specifies the time grain (aggregation interval or + frequency).""" + + @overload + def __init__( + self, + *, + dimensions: Optional[List["_models.NameAndDescription"]] = None, + description: Optional[str] = None, + name: Optional[str] = None, + namespace: Optional[str] = None, + primary_aggregation_type: Optional[Union[str, "_models.Aggregation"]] = None, + supported_aggregation_types: Optional[List[str]] = None, + unit: Optional[Union[str, "_models.MetricUnit"]] = None, + metric_availabilities: Optional[List["_models.MetricAvailability"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricDefinitionCollection(_model_base.Model): + """Represents collection of metric definitions. + + :ivar value: the values for the metric definitions. Required. 
+ :vartype value: list[~customizations.models.MetricDefinition] + """ + + value: List["_models.MetricDefinition"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """the values for the metric definitions. Required.""" + + @overload + def __init__( + self, + *, + value: List["_models.MetricDefinition"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricNamespace(_model_base.Model): + """Metric namespace class specifies the metadata for a metric namespace. + + :ivar description: The namespace description. + :vartype description: str + :ivar name: The metric namespace name. + :vartype name: str + """ + + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The namespace description.""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The metric namespace name.""" + + @overload + def __init__( + self, + *, + description: Optional[str] = None, + name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricNamespaceCollection(_model_base.Model): + """Represents collection of metric namespaces. + + :ivar value: The values for the metric namespaces. Required. + :vartype value: list[~customizations.models.MetricNamespace] + """ + + value: List["_models.MetricNamespace"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The values for the metric namespaces. 
Required.""" + + @overload + def __init__( + self, + *, + value: List["_models.MetricNamespace"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricRequestPayload(_model_base.Model): + """Filters to fetch the set of metric. + + :ivar filters: Get metrics for specific dimension values. Example: Metric contains dimension + like SamplerName, Error. To retrieve all the time series data where SamplerName + is equals to HTTPRequest1 or HTTPRequest2, the DimensionFilter value will be + {"SamplerName", ["HTTPRequest1", "HTTPRequest2"}. + :vartype filters: list[~customizations.models.DimensionFilter] + """ + + filters: Optional[List["_models.DimensionFilter"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Get metrics for specific dimension values. Example: Metric contains dimension + like SamplerName, Error. To retrieve all the time series data where SamplerName + is equals to HTTPRequest1 or HTTPRequest2, the DimensionFilter value will be + {\"SamplerName\", [\"HTTPRequest1\", \"HTTPRequest2\"}.""" + + @overload + def __init__( + self, + *, + filters: Optional[List["_models.DimensionFilter"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class MetricValue(_model_base.Model): + """Represents a metric value. + + :ivar timestamp: The timestamp for the metric value in RFC 3339 format. + :vartype timestamp: ~datetime.datetime + :ivar value: The metric value. 
+ :vartype value: float + """ + + timestamp: Optional[datetime.datetime] = rest_field( + visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp for the metric value in RFC 3339 format.""" + value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The metric value.""" + + @overload + def __init__( + self, + *, + timestamp: Optional[datetime.datetime] = None, + value: Optional[float] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class NameAndDescription(_model_base.Model): + """The name and description. + + :ivar description: The description. + :vartype description: str + :ivar name: The name. + :vartype name: str + """ + + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The description.""" + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name.""" + + @overload + def __init__( + self, + *, + description: Optional[str] = None, + name: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OptionalLoadTestConfiguration(_model_base.Model): + """Configuration for quick load test. + + :ivar endpoint_url: Test URL. Provide the complete HTTP URL. For example, + `https://contoso-app.azurewebsites.net/login `_. + :vartype endpoint_url: str + :ivar requests_per_second: Target throughput (requests per second). This may not be necessarily + achieved. 
The actual throughput will be lower if the application is not capable of handling it. + :vartype requests_per_second: int + :ivar max_response_time_in_ms: Maximum response time in milliseconds of the API/endpoint. + :vartype max_response_time_in_ms: int + :ivar virtual_users: Number of concurrent virtual users. + :vartype virtual_users: int + :ivar ramp_up_time: Ramp up time in seconds. + :vartype ramp_up_time: int + :ivar duration: Test run duration in seconds. + :vartype duration: int + """ + + endpoint_url: Optional[str] = rest_field( + name="endpointUrl", visibility=["read", "create", "update", "delete", "query"] + ) + """Test URL. Provide the complete HTTP URL. For example, + `https://contoso-app.azurewebsites.net/login `_.""" + requests_per_second: Optional[int] = rest_field( + name="requestsPerSecond", visibility=["read", "create", "update", "delete", "query"] + ) + """Target throughput (requests per second). This may not be necessarily achieved. The actual + throughput will be lower if the application is not capable of handling it.""" + max_response_time_in_ms: Optional[int] = rest_field( + name="maxResponseTimeInMs", visibility=["read", "create", "update", "delete", "query"] + ) + """Maximum response time in milliseconds of the API/endpoint.""" + virtual_users: Optional[int] = rest_field( + name="virtualUsers", visibility=["read", "create", "update", "delete", "query"] + ) + """Number of concurrent virtual users.""" + ramp_up_time: Optional[int] = rest_field( + name="rampUpTime", visibility=["read", "create", "update", "delete", "query"] + ) + """Ramp up time in seconds.""" + duration: Optional[int] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Test run duration in seconds.""" + + @overload + def __init__( + self, + *, + endpoint_url: Optional[str] = None, + requests_per_second: Optional[int] = None, + max_response_time_in_ms: Optional[int] = None, + virtual_users: Optional[int] = None, + ramp_up_time: Optional[int] = None, + 
duration: Optional[int] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PassFailCriteria(_model_base.Model): + """Pass fail criteria for a test. + + :ivar pass_fail_metrics: Map of id and pass fail metrics { id : pass fail metrics }. + :vartype pass_fail_metrics: dict[str, ~customizations.models.PassFailMetric] + :ivar pass_fail_server_metrics: Map of id and pass fail server metrics { id : pass fail + metrics }. + :vartype pass_fail_server_metrics: dict[str, ~customizations.models.PassFailServerMetric] + """ + + pass_fail_metrics: Optional[Dict[str, "_models.PassFailMetric"]] = rest_field( + name="passFailMetrics", visibility=["read", "create", "update", "delete", "query"] + ) + """Map of id and pass fail metrics { id : pass fail metrics }.""" + pass_fail_server_metrics: Optional[Dict[str, "_models.PassFailServerMetric"]] = rest_field( + name="passFailServerMetrics", visibility=["read", "create", "update", "delete", "query"] + ) + """Map of id and pass fail server metrics { id : pass fail metrics }.""" + + @overload + def __init__( + self, + *, + pass_fail_metrics: Optional[Dict[str, "_models.PassFailMetric"]] = None, + pass_fail_server_metrics: Optional[Dict[str, "_models.PassFailServerMetric"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PassFailMetric(_model_base.Model): + """Pass fail metric. + + :ivar client_metric: The client metric on which the criteria should be applied. 
Known values + are: "response_time_ms", "latency", "error", "requests", and "requests_per_sec". + :vartype client_metric: str or ~customizations.models.PFMetrics + :ivar aggregate: The aggregation function to be applied on the client metric. Allowed functions + + * ‘percentage’ - for error metric , ‘avg’, percentiles like ‘p50’, ‘p90’, & so on, ‘min’, + ‘max’ - for response_time_ms and latency metric, ‘avg’ - for requests_per_sec, + ‘count’ - for requests. Known values are: "count", "percentage", "avg", "p50", "p75", "p90", + "p95", "p96", "p97", "p98", "p99", "p99.9", "p99.99", "min", and "max". + :vartype aggregate: str or ~customizations.models.PassFailAggregationFunction + :ivar condition: The comparison operator. Supported types ‘>’, ‘<’. + :vartype condition: str + :ivar request_name: Request name for which the Pass fail criteria has to be applied. + :vartype request_name: str + :ivar value: The value to compare with the client metric. Allowed values - ‘error : [0.0 , + 100.0] unit- % ’, response_time_ms and latency : any integer value unit- ms. + :vartype value: float + :ivar action: Action taken after the threshold is met. Default is ‘continue’. Known values are: + "continue" and "stop". + :vartype action: str or ~customizations.models.PassFailAction + :ivar actual_value: The actual value of the client metric for the test run. + :vartype actual_value: float + :ivar result: Outcome of the test run. Known values are: "passed", "undetermined", and + "failed". + :vartype result: str or ~customizations.models.PassFailResult + """ + + client_metric: Optional[Union[str, "_models.PFMetrics"]] = rest_field( + name="clientMetric", visibility=["read", "create", "update", "delete", "query"] + ) + """The client metric on which the criteria should be applied. 
Known values are: + \"response_time_ms\", \"latency\", \"error\", \"requests\", and \"requests_per_sec\".""" + aggregate: Optional[Union[str, "_models.PassFailAggregationFunction"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The aggregation function to be applied on the client metric. Allowed functions + + * ‘percentage’ - for error metric , ‘avg’, percentiles like ‘p50’, ‘p90’, & so on, ‘min’, + ‘max’ - for response_time_ms and latency metric, ‘avg’ - for requests_per_sec, + ‘count’ - for requests. Known values are: \"count\", \"percentage\", \"avg\", \"p50\", \"p75\", + \"p90\", \"p95\", \"p96\", \"p97\", \"p98\", \"p99\", \"p99.9\", \"p99.99\", \"min\", and + \"max\".""" + condition: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The comparison operator. Supported types ‘>’, ‘<’.""" + request_name: Optional[str] = rest_field( + name="requestName", visibility=["read", "create", "update", "delete", "query"] + ) + """Request name for which the Pass fail criteria has to be applied.""" + value: Optional[float] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value to compare with the client metric. Allowed values - ‘error : [0.0 , + 100.0] unit- % ’, response_time_ms and latency : any integer value unit- ms.""" + action: Optional[Union[str, "_models.PassFailAction"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Action taken after the threshold is met. Default is ‘continue’. Known values are: \"continue\" + and \"stop\".""" + actual_value: Optional[float] = rest_field(name="actualValue", visibility=["read"]) + """The actual value of the client metric for the test run.""" + result: Optional[Union[str, "_models.PassFailResult"]] = rest_field(visibility=["read"]) + """Outcome of the test run. 
Known values are: \"passed\", \"undetermined\", and \"failed\".""" + + @overload + def __init__( + self, + *, + client_metric: Optional[Union[str, "_models.PFMetrics"]] = None, + aggregate: Optional[Union[str, "_models.PassFailAggregationFunction"]] = None, + condition: Optional[str] = None, + request_name: Optional[str] = None, + value: Optional[float] = None, + action: Optional[Union[str, "_models.PassFailAction"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PassFailServerMetric(_model_base.Model): + """Pass fail server metric. + + :ivar resource_id: The resource id of the resource emitting the metric. Required. + :vartype resource_id: str + :ivar metric_namespace: The server metric namespace. Required. + :vartype metric_namespace: str + :ivar metric_name: The server metric name. Required. + :vartype metric_name: str + :ivar aggregation: Aggregation Type. Required. + :vartype aggregation: str + :ivar condition: The comparison operator. Supported types ‘>’, ‘<’. Required. + :vartype condition: str + :ivar value: The value to compare with the server metric. Required. + :vartype value: float + :ivar action: Action taken after the threshold is met. Default is ‘continue’. Known values are: + "continue" and "stop". + :vartype action: str or ~customizations.models.PassFailAction + :ivar actual_value: The actual value of the server metric. + :vartype actual_value: float + :ivar result: Outcome of the test run. Known values are: "passed", "undetermined", and + "failed". + :vartype result: str or ~customizations.models.PassFailResult + """ + + resource_id: str = rest_field(name="resourceId", visibility=["read", "create", "update", "delete", "query"]) + """The resource id of the resource emitting the metric. 
Required.""" + metric_namespace: str = rest_field( + name="metricNamespace", visibility=["read", "create", "update", "delete", "query"] + ) + """The server metric namespace. Required.""" + metric_name: str = rest_field(name="metricName", visibility=["read", "create", "update", "delete", "query"]) + """The server metric name. Required.""" + aggregation: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Aggregation Type. Required.""" + condition: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The comparison operator. Supported types ‘>’, ‘<’. Required.""" + value: float = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value to compare with the server metric. Required.""" + action: Optional[Union[str, "_models.PassFailAction"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Action taken after the threshold is met. Default is ‘continue’. Known values are: \"continue\" + and \"stop\".""" + actual_value: Optional[float] = rest_field(name="actualValue", visibility=["read"]) + """The actual value of the server metric.""" + result: Optional[Union[str, "_models.PassFailResult"]] = rest_field(visibility=["read"]) + """Outcome of the test run. Known values are: \"passed\", \"undetermined\", and \"failed\".""" + + @overload + def __init__( + self, + *, + resource_id: str, + metric_namespace: str, + metric_name: str, + aggregation: str, + condition: str, + value: float, + action: Optional[Union[str, "_models.PassFailAction"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class RegionalConfiguration(_model_base.Model): + """Region distribution configuration for the load test. 
+ + :ivar engine_instances: The number of engine instances to execute load test in specified + region. Supported values are in range of 1-400. Required. + :vartype engine_instances: int + :ivar region: Azure region name. + The region name should be of a format accepted by ARM, and should be a region supported by Azure + Load Testing. For example, East US should be passed as "eastus". + The region name must match one of the strings in the "Name" column returned from running the + "az account list-locations -o table" Azure CLI command. Required. + :vartype region: str + """ + + engine_instances: int = rest_field( + name="engineInstances", visibility=["read", "create", "update", "delete", "query"] + ) + """ The number of engine instances to execute load test in specified region. Supported values are + in range of 1-400. Required.""" + region: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Azure region name. + The region name should be of a format accepted by ARM, and should be a region supported by Azure + Load Testing. For example, East US should be passed as \"eastus\". + The region name must match one of the strings in the \"Name\" column returned from running the + \"az account list-locations -o table\" Azure CLI command. Required.""" + + @overload + def __init__( + self, + *, + engine_instances: int, + region: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ResourceMetric(_model_base.Model): + """Associated metric definition for particular metrics of the azure resource ( + Refer : + `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition + `_). + + :ivar id: Unique name for metric. + :vartype id: str + :ivar resource_id: Azure resource id. Required. 
+ :vartype resource_id: str + :ivar metric_namespace: Metric name space. Required. + :vartype metric_namespace: str + :ivar display_description: Metric description. + :vartype display_description: str + :ivar name: The invariant value of metric name. Required. + :vartype name: str + :ivar aggregation: Metric aggregation. Required. + :vartype aggregation: str + :ivar unit: Metric unit. + :vartype unit: str + :ivar resource_type: Azure resource type. Required. + :vartype resource_type: str + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """Unique name for metric.""" + resource_id: str = rest_field(name="resourceId", visibility=["read", "create", "update", "delete", "query"]) + """Azure resource id. Required.""" + metric_namespace: str = rest_field( + name="metricNamespace", visibility=["read", "create", "update", "delete", "query"] + ) + """Metric name space. Required.""" + display_description: Optional[str] = rest_field( + name="displayDescription", visibility=["read", "create", "update", "delete", "query"] + ) + """Metric description.""" + name: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The invariant value of metric name. Required.""" + aggregation: str = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metric aggregation. Required.""" + unit: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Metric unit.""" + resource_type: str = rest_field(name="resourceType", visibility=["read", "create", "update", "delete", "query"]) + """Azure resource type. Required.""" + + @overload + def __init__( + self, + *, + resource_id: str, + metric_namespace: str, + name: str, + aggregation: str, + resource_type: str, + display_description: Optional[str] = None, + unit: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Secret(_model_base.Model): + """Secret. + + :ivar value: The value of the secret for the respective type. + :vartype value: str + :ivar type: Type of secret. Known values are: "AKV_SECRET_URI" and "SECRET_VALUE". + :vartype type: str or ~customizations.models.SecretType + """ + + value: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The value of the secret for the respective type.""" + type: Optional[Union[str, "_models.SecretType"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Type of secret. Known values are: \"AKV_SECRET_URI\" and \"SECRET_VALUE\".""" + + @overload + def __init__( + self, + *, + value: Optional[str] = None, + type: Optional[Union[str, "_models.SecretType"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Test(_model_base.Model): + """Load test model. + + :ivar pass_fail_criteria: Pass fail criteria for a test. + :vartype pass_fail_criteria: ~customizations.models.PassFailCriteria + :ivar auto_stop_criteria: Auto stop criteria for a test. This will automatically stop a load + test if the error percentage is high for a certain time window. + :vartype auto_stop_criteria: ~customizations.models.AutoStopCriteria + :ivar secrets: Secrets can be stored in an Azure Key Vault or any other secret store. If the + secret is stored in an Azure Key Vault, the value should be the secret + identifier and the type should be AKV_SECRET_URI. If the secret is stored + elsewhere, the secret value should be provided directly and the type should be + SECRET_VALUE. 
+ :vartype secrets: dict[str, ~customizations.models.Secret] + :ivar certificate: Certificates metadata. + :vartype certificate: ~customizations.models.CertificateMetadata + :ivar environment_variables: Environment variables which are defined as a set of + pairs. + :vartype environment_variables: dict[str, str] + :ivar load_test_configuration: The load test configuration. + :vartype load_test_configuration: ~customizations.models.LoadTestConfiguration + :ivar baseline_test_run_id: Id of the test run to be marked as baseline to view trends of + client-side metrics from recent test runs. + :vartype baseline_test_run_id: str + :ivar input_artifacts: The input artifacts for the test. + :vartype input_artifacts: ~customizations.models.TestInputArtifacts + :ivar test_id: Unique test identifier for the load test, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :vartype test_id: str + :ivar description: The test description. + :vartype description: str + :ivar display_name: Display name of a test. + :vartype display_name: str + :ivar subnet_id: Subnet ID on which the load test instances should run. + :vartype subnet_id: str + :ivar kind: Kind of test. Known values are: "URL", "JMX", and "Locust". + :vartype kind: str or ~customizations.models.TestKind + :ivar public_ip_disabled: Inject load test engines without deploying public IP for outbound + access. + :vartype public_ip_disabled: bool + :ivar keyvault_reference_identity_type: Type of the managed identity referencing the Key vault. + :vartype keyvault_reference_identity_type: str + :ivar keyvault_reference_identity_id: Resource Id of the managed identity referencing the Key + vault. + :vartype keyvault_reference_identity_id: str + :ivar metrics_reference_identity_type: Type of the managed identity referencing the metrics. + Known values are: "SystemAssigned" and "UserAssigned". 
+ :vartype metrics_reference_identity_type: str or ~customizations.models.ManagedIdentityType + :ivar metrics_reference_identity_id: Resource Id of the managed identity referencing the + metrics. + :vartype metrics_reference_identity_id: str + :ivar engine_built_in_identity_type: Type of the managed identity built in load test engines. + Known values are: "SystemAssigned" and "UserAssigned". + :vartype engine_built_in_identity_type: str or ~customizations.models.ManagedIdentityType + :ivar engine_built_in_identity_ids: Resource Ids of the managed identity built in to load test + engines. Required if engineBuiltInIdentityType is UserAssigned. + :vartype engine_built_in_identity_ids: list[str] + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + pass_fail_criteria: Optional["_models.PassFailCriteria"] = rest_field( + name="passFailCriteria", visibility=["read", "create", "update", "delete", "query"] + ) + """Pass fail criteria for a test.""" + auto_stop_criteria: Optional["_models.AutoStopCriteria"] = rest_field( + name="autoStopCriteria", visibility=["read", "create", "update", "delete", "query"] + ) + """Auto stop criteria for a test. This will automatically stop a load test if the error percentage + is high for a certain time window.""" + secrets: Optional[Dict[str, "_models.Secret"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Secrets can be stored in an Azure Key Vault or any other secret store. If the + secret is stored in an Azure Key Vault, the value should be the secret + identifier and the type should be AKV_SECRET_URI. 
If the secret is stored + elsewhere, the secret value should be provided directly and the type should be + SECRET_VALUE.""" + certificate: Optional["_models.CertificateMetadata"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Certificates metadata.""" + environment_variables: Optional[Dict[str, str]] = rest_field( + name="environmentVariables", visibility=["read", "create", "update", "delete", "query"] + ) + """Environment variables which are defined as a set of pairs.""" + load_test_configuration: Optional["_models.LoadTestConfiguration"] = rest_field( + name="loadTestConfiguration", visibility=["read", "create", "update", "delete", "query"] + ) + """The load test configuration.""" + baseline_test_run_id: Optional[str] = rest_field( + name="baselineTestRunId", visibility=["read", "create", "update", "delete", "query"] + ) + """Id of the test run to be marked as baseline to view trends of client-side metrics from recent + test runs.""" + input_artifacts: Optional["_models.TestInputArtifacts"] = rest_field(name="inputArtifacts", visibility=["read"]) + """The input artifacts for the test.""" + test_id: str = rest_field(name="testId", visibility=["read"]) + """Unique test identifier for the load test, must contain only lower-case alphabetic, numeric, + underscore or hyphen characters. Required.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The test description.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Display name of a test.""" + subnet_id: Optional[str] = rest_field(name="subnetId", visibility=["read", "create", "update", "delete", "query"]) + """Subnet ID on which the load test instances should run.""" + kind: Optional[Union[str, "_models.TestKind"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Kind of test. 
Known values are: \"URL\", \"JMX\", and \"Locust\".""" + public_ip_disabled: Optional[bool] = rest_field( + name="publicIPDisabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Inject load test engines without deploying public IP for outbound access.""" + keyvault_reference_identity_type: Optional[str] = rest_field( + name="keyvaultReferenceIdentityType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of the managed identity referencing the Key vault.""" + keyvault_reference_identity_id: Optional[str] = rest_field( + name="keyvaultReferenceIdentityId", visibility=["read", "create", "update", "delete", "query"] + ) + """Resource Id of the managed identity referencing the Key vault.""" + metrics_reference_identity_type: Optional[Union[str, "_models.ManagedIdentityType"]] = rest_field( + name="metricsReferenceIdentityType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of the managed identity referencing the metrics. Known values are: \"SystemAssigned\" and + \"UserAssigned\".""" + metrics_reference_identity_id: Optional[str] = rest_field( + name="metricsReferenceIdentityId", visibility=["read", "create", "update", "delete", "query"] + ) + """Resource Id of the managed identity referencing the metrics.""" + engine_built_in_identity_type: Optional[Union[str, "_models.ManagedIdentityType"]] = rest_field( + name="engineBuiltInIdentityType", visibility=["read", "create", "update", "delete", "query"] + ) + """Type of the managed identity built in load test engines. Known values are: \"SystemAssigned\" + and \"UserAssigned\".""" + engine_built_in_identity_ids: Optional[List[str]] = rest_field( + name="engineBuiltInIdentityIds", visibility=["read", "create", "update", "delete", "query"] + ) + """Resource Ids of the managed identity built in to load test engines. 
Required if + engineBuiltInIdentityType is UserAssigned.""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + pass_fail_criteria: Optional["_models.PassFailCriteria"] = None, + auto_stop_criteria: Optional["_models.AutoStopCriteria"] = None, + secrets: Optional[Dict[str, "_models.Secret"]] = None, + certificate: Optional["_models.CertificateMetadata"] = None, + environment_variables: Optional[Dict[str, str]] = None, + load_test_configuration: Optional["_models.LoadTestConfiguration"] = None, + baseline_test_run_id: Optional[str] = None, + description: Optional[str] = None, + display_name: Optional[str] = None, + subnet_id: Optional[str] = None, + kind: Optional[Union[str, "_models.TestKind"]] = None, + public_ip_disabled: Optional[bool] = None, + keyvault_reference_identity_type: Optional[str] = None, + keyvault_reference_identity_id: Optional[str] = None, + metrics_reference_identity_type: Optional[Union[str, "_models.ManagedIdentityType"]] = None, + metrics_reference_identity_id: Optional[str] = None, + engine_built_in_identity_type: Optional[Union[str, "_models.ManagedIdentityType"]] = None, + engine_built_in_identity_ids: Optional[List[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestAppComponents(_model_base.Model): + """Test app components. + + :ivar components: Azure resource collection { resource id (fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}) + : resource object }. Required. + :vartype components: dict[str, ~customizations.models.AppComponent] + :ivar test_id: Test identifier. + :vartype test_id: str + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + components: Dict[str, "_models.AppComponent"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Azure resource collection { resource id (fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}) + : resource object }. 
Required.""" + test_id: Optional[str] = rest_field(name="testId", visibility=["read"]) + """Test identifier.""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( + self, + *, + components: Dict[str, "_models.AppComponent"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestFileInfo(_model_base.Model): + """Test file info. + + :ivar file_name: Name of the file. Required. + :vartype file_name: str + :ivar url: File URL. + :vartype url: str + :ivar file_type: File type. Known values are: "JMX_FILE", "USER_PROPERTIES", + "ADDITIONAL_ARTIFACTS", "ZIPPED_ARTIFACTS", "URL_TEST_CONFIG", and "TEST_SCRIPT". + :vartype file_type: str or ~customizations.models.FileType + :ivar expire_date_time: Expiry time of the file (RFC 3339 literal format). + :vartype expire_date_time: ~datetime.datetime + :ivar validation_status: Validation status of the file. Known values are: "NOT_VALIDATED", + "VALIDATION_SUCCESS", "VALIDATION_FAILURE", "VALIDATION_INITIATED", and + "VALIDATION_NOT_REQUIRED". + :vartype validation_status: str or ~customizations.models.FileValidationStatus + :ivar validation_failure_details: Validation failure error details. 
+ :vartype validation_failure_details: str + """ + + file_name: str = rest_field(name="fileName", visibility=["read", "create", "update", "delete", "query"]) + """Name of the file. Required.""" + url: Optional[str] = rest_field(visibility=["read"]) + """File URL.""" + file_type: Optional[Union[str, "_models.FileType"]] = rest_field(name="fileType", visibility=["read"]) + """File type. Known values are: \"JMX_FILE\", \"USER_PROPERTIES\", \"ADDITIONAL_ARTIFACTS\", + \"ZIPPED_ARTIFACTS\", \"URL_TEST_CONFIG\", and \"TEST_SCRIPT\".""" + expire_date_time: Optional[datetime.datetime] = rest_field( + name="expireDateTime", visibility=["read"], format="rfc3339" + ) + """Expiry time of the file (RFC 3339 literal format).""" + validation_status: Optional[Union[str, "_models.FileValidationStatus"]] = rest_field( + name="validationStatus", visibility=["read"] + ) + """Validation status of the file. Known values are: \"NOT_VALIDATED\", \"VALIDATION_SUCCESS\", + \"VALIDATION_FAILURE\", \"VALIDATION_INITIATED\", and \"VALIDATION_NOT_REQUIRED\".""" + validation_failure_details: Optional[str] = rest_field(name="validationFailureDetails", visibility=["read"]) + """Validation failure error details.""" + + @overload + def __init__( + self, + *, + file_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestInputArtifacts(_model_base.Model): + """The input artifacts for the test. + + :ivar config_file_info: The load test YAML file that contains the the test configuration. + :vartype config_file_info: ~customizations.models.TestFileInfo + :ivar test_script_file_info: The test script file for the test run. + :vartype test_script_file_info: ~customizations.models.TestFileInfo + :ivar user_prop_file_info: The user properties file. 
+ :vartype user_prop_file_info: ~customizations.models.TestFileInfo + :ivar input_artifacts_zip_file_info: The zip file with all input artifacts. + :vartype input_artifacts_zip_file_info: ~customizations.models.TestFileInfo + :ivar url_test_config_file_info: The config json file for url based test. + :vartype url_test_config_file_info: ~customizations.models.TestFileInfo + :ivar additional_file_info: Additional supported files for the test run. + :vartype additional_file_info: list[~customizations.models.TestFileInfo] + """ + + config_file_info: Optional["_models.TestFileInfo"] = rest_field( + name="configFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The load test YAML file that contains the the test configuration.""" + test_script_file_info: Optional["_models.TestFileInfo"] = rest_field( + name="testScriptFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The test script file for the test run.""" + user_prop_file_info: Optional["_models.TestFileInfo"] = rest_field( + name="userPropFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The user properties file.""" + input_artifacts_zip_file_info: Optional["_models.TestFileInfo"] = rest_field( + name="inputArtifactsZipFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The zip file with all input artifacts.""" + url_test_config_file_info: Optional["_models.TestFileInfo"] = rest_field( + name="urlTestConfigFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The config json file for url based test.""" + additional_file_info: Optional[List["_models.TestFileInfo"]] = rest_field( + name="additionalFileInfo", visibility=["read"] + ) + """Additional supported files for the test run.""" + + @overload + def __init__( + self, + *, + config_file_info: Optional["_models.TestFileInfo"] = None, + test_script_file_info: Optional["_models.TestFileInfo"] = None, + user_prop_file_info: 
Optional["_models.TestFileInfo"] = None, + input_artifacts_zip_file_info: Optional["_models.TestFileInfo"] = None, + url_test_config_file_info: Optional["_models.TestFileInfo"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestProfile(_model_base.Model): + """Test Profile Model. + + :ivar test_profile_id: Unique identifier for the test profile, must contain only lower-case + alphabetic, numeric, underscore or hyphen characters. Required. + :vartype test_profile_id: str + :ivar display_name: Display name of the test profile. + :vartype display_name: str + :ivar description: Description for the test profile. + :vartype description: str + :ivar test_id: Associated test ID for the test profile. This property is required for creating + a Test Profile and it's not allowed to be updated. + :vartype test_id: str + :ivar target_resource_id: Target resource ID on which the test profile is created. This + property is required for creating a Test Profile and it's not allowed to be updated. + :vartype target_resource_id: str + :ivar target_resource_configurations: Configurations of the target resource on which testing + would be done. + :vartype target_resource_configurations: ~customizations.models.TargetResourceConfigurations + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. 
+ :vartype last_modified_by: str + """ + + test_profile_id: str = rest_field(name="testProfileId", visibility=["read"]) + """Unique identifier for the test profile, must contain only lower-case alphabetic, numeric, + underscore or hyphen characters. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Display name of the test profile.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Description for the test profile.""" + test_id: Optional[str] = rest_field(name="testId", visibility=["read", "create"]) + """Associated test ID for the test profile. This property is required for creating a Test Profile + and it's not allowed to be updated.""" + target_resource_id: Optional[str] = rest_field(name="targetResourceId", visibility=["read", "create"]) + """Target resource ID on which the test profile is created. This property is required for creating + a Test Profile and it's not allowed to be updated.""" + target_resource_configurations: Optional["_models.TargetResourceConfigurations"] = rest_field( + name="targetResourceConfigurations", visibility=["read", "create", "update", "delete", "query"] + ) + """Configurations of the target resource on which testing would be done.""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def 
__init__( + self, + *, + display_name: Optional[str] = None, + description: Optional[str] = None, + test_id: Optional[str] = None, + target_resource_id: Optional[str] = None, + target_resource_configurations: Optional["_models.TargetResourceConfigurations"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestProfileRun(_model_base.Model): + """Test Profile Run model. + + :ivar test_profile_run_id: Unique identifier for the test profile run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :vartype test_profile_run_id: str + :ivar display_name: Display name for the test profile run. + :vartype display_name: str + :ivar description: The test profile run description. + :vartype description: str + :ivar test_profile_id: Associated test profile ID for the test profile run. This is required to + create a test profile run and can't be updated. + :vartype test_profile_id: str + :ivar target_resource_id: Target resource ID on which the test profile run is created. + :vartype target_resource_id: str + :ivar target_resource_configurations: Configurations of the target resource on which the test + profile ran. + :vartype target_resource_configurations: ~customizations.models.TargetResourceConfigurations + :ivar status: The test profile run status. Known values are: "ACCEPTED", "NOTSTARTED", + "EXECUTING", "DONE", "CANCELLING", "CANCELLED", and "FAILED". + :vartype status: str or ~customizations.models.TestProfileRunStatus + :ivar error_details: Error details if there is any failure in test profile run. These errors + are specific to the Test Profile Run. 
+ :vartype error_details: list[~customizations.models.ErrorDetails] + :ivar start_date_time: The test profile run start DateTime(RFC 3339 literal format). + :vartype start_date_time: ~datetime.datetime + :ivar end_date_time: The test profile run end DateTime(RFC 3339 literal format). + :vartype end_date_time: ~datetime.datetime + :ivar duration_in_seconds: Test profile run duration in seconds. + :vartype duration_in_seconds: int + :ivar test_run_details: Details of the test runs ran as part of the test profile run. + Key is the testRunId of the corresponding testRun. + :vartype test_run_details: dict[str, ~customizations.models.TestRunDetail] + :ivar recommendations: Recommendations provided based on a successful test profile run. + :vartype recommendations: list[~customizations.models.TestProfileRunRecommendation] + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + test_profile_run_id: str = rest_field(name="testProfileRunId", visibility=["read"]) + """Unique identifier for the test profile run, must contain only lower-case alphabetic, numeric, + underscore or hyphen characters. Required.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Display name for the test profile run.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The test profile run description.""" + test_profile_id: Optional[str] = rest_field(name="testProfileId", visibility=["read", "create"]) + """Associated test profile ID for the test profile run. 
This is required to create a test profile + run and can't be updated.""" + target_resource_id: Optional[str] = rest_field(name="targetResourceId", visibility=["read"]) + """Target resource ID on which the test profile run is created.""" + target_resource_configurations: Optional["_models.TargetResourceConfigurations"] = rest_field( + name="targetResourceConfigurations", visibility=["read"] + ) + """Configurations of the target resource on which the test profile ran.""" + status: Optional[Union[str, "_models.TestProfileRunStatus"]] = rest_field(visibility=["read"]) + """The test profile run status. Known values are: \"ACCEPTED\", \"NOTSTARTED\", \"EXECUTING\", + \"DONE\", \"CANCELLING\", \"CANCELLED\", and \"FAILED\".""" + error_details: Optional[List["_models.ErrorDetails"]] = rest_field(name="errorDetails", visibility=["read"]) + """Error details if there is any failure in test profile run. These errors are specific to the + Test Profile Run.""" + start_date_time: Optional[datetime.datetime] = rest_field( + name="startDateTime", visibility=["read"], format="rfc3339" + ) + """The test profile run start DateTime(RFC 3339 literal format).""" + end_date_time: Optional[datetime.datetime] = rest_field(name="endDateTime", visibility=["read"], format="rfc3339") + """The test profile run end DateTime(RFC 3339 literal format).""" + duration_in_seconds: Optional[int] = rest_field(name="durationInSeconds", visibility=["read"]) + """Test profile run duration in seconds.""" + test_run_details: Optional[Dict[str, "_models.TestRunDetail"]] = rest_field( + name="testRunDetails", visibility=["read"] + ) + """Details of the test runs ran as part of the test profile run. 
+ Key is the testRunId of the corresponding testRun.""" + recommendations: Optional[List["_models.TestProfileRunRecommendation"]] = rest_field(visibility=["read"]) + """Recommendations provided based on a successful test profile run.""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( + self, + *, + display_name: Optional[str] = None, + description: Optional[str] = None, + test_profile_id: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestProfileRunRecommendation(_model_base.Model): + """A recommendation object that provides a list of configuration that optimizes its category. + + :ivar category: Category of the recommendation. Required. Known values are: + "ThroughputOptimized" and "CostOptimized". + :vartype category: str or ~customizations.models.RecommendationCategory + :ivar configurations: List of configurations IDs for which the recommendation is applicable. + These are a subset of the provided target resource configurations. 
+ :vartype configurations: list[str] + """ + + category: Union[str, "_models.RecommendationCategory"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Category of the recommendation. Required. Known values are: \"ThroughputOptimized\" and + \"CostOptimized\".""" + configurations: Optional[List[str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """List of configurations IDs for which the recommendation is applicable. These are a subset of + the provided target resource configurations.""" + + @overload + def __init__( + self, + *, + category: Union[str, "_models.RecommendationCategory"], + configurations: Optional[List[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRun(_model_base.Model): + """Load test run model. + + :ivar test_run_id: Unique test run identifier for the load test run, must contain only + lower-case alphabetic, numeric, underscore or hyphen characters. Required. + :vartype test_run_id: str + :ivar pass_fail_criteria: Pass fail criteria for a test. + :vartype pass_fail_criteria: ~customizations.models.PassFailCriteria + :ivar auto_stop_criteria: Auto stop criteria for a test. This will automatically stop a load + test if the error percentage is high for a certain time window. + :vartype auto_stop_criteria: ~customizations.models.AutoStopCriteria + :ivar secrets: Secrets can be stored in an Azure Key Vault or any other secret store. If the + secret is stored in an Azure Key Vault, the value should be the secret + identifier and the type should be AKV_SECRET_URI. If the secret is stored + elsewhere, the secret value should be provided directly and the type should be + SECRET_VALUE. 
+ :vartype secrets: dict[str, ~customizations.models.Secret] + :ivar certificate: Certificates metadata. + :vartype certificate: ~customizations.models.CertificateMetadata + :ivar environment_variables: Environment variables which are defined as a set of + pairs. + :vartype environment_variables: dict[str, str] + :ivar error_details: Error details if there is any failure in load test run. + :vartype error_details: list[~customizations.models.ErrorDetails] + :ivar test_run_statistics: Test run statistics. Key is the sampler name and value is the set of + statistics for performance metrics like response time, throughput, etc. from the load test run. + The sampler name is the same as the name mentioned in the test script. + Sampler name "Total" represents the aggregated statistics of all the samplers. + :vartype test_run_statistics: dict[str, ~customizations.models.TestRunStatistics] + :ivar regional_statistics: Regional statistics. Key is the Azure region name and value is the + test run statistics. + The region name should of format accepted by ARM, and should be a region supported by Azure + Load Testing. For example, East US should be passed as "eastus". + The region name must match one of the strings in the "Name" column returned from running the + "az account list-locations -o table" Azure CLI command. + :vartype regional_statistics: dict[str, ~customizations.models.TestRunStatistics] + :ivar load_test_configuration: The load test configuration. + :vartype load_test_configuration: ~customizations.models.LoadTestConfiguration + :ivar test_artifacts: Collection of test run artifacts. + :vartype test_artifacts: ~customizations.models.TestRunArtifacts + :ivar test_result: Test result for pass/Fail criteria used during the test run. Known values + are: "PASSED", "NOT_APPLICABLE", and "FAILED". + :vartype test_result: str or ~customizations.models.PassFailTestResult + :ivar virtual_users: Number of virtual users, for which test has been run. 
+ :vartype virtual_users: int + :ivar display_name: Display name of a testRun. + :vartype display_name: str + :ivar test_id: Associated test Id. + :vartype test_id: str + :ivar description: The test run description. + :vartype description: str + :ivar status: The test run status. Known values are: "ACCEPTED", "NOTSTARTED", "PROVISIONING", + "PROVISIONED", "CONFIGURING", "CONFIGURED", "EXECUTING", "EXECUTED", "DEPROVISIONING", + "DEPROVISIONED", "DONE", "CANCELLING", "CANCELLED", "FAILED", "VALIDATION_SUCCESS", and + "VALIDATION_FAILURE". + :vartype status: str or ~customizations.models.TestRunStatus + :ivar start_date_time: The test run start DateTime(RFC 3339 literal format). + :vartype start_date_time: ~datetime.datetime + :ivar end_date_time: The test run end DateTime(RFC 3339 literal format). + :vartype end_date_time: ~datetime.datetime + :ivar executed_date_time: Test run initiated time. + :vartype executed_date_time: ~datetime.datetime + :ivar portal_url: Portal url. + :vartype portal_url: str + :ivar duration: Test run duration in milliseconds. + :vartype duration: int + :ivar virtual_user_hours: Virtual user hours consumed by the test run. + :vartype virtual_user_hours: float + :ivar subnet_id: Subnet ID on which the load test instances should run. + :vartype subnet_id: str + :ivar kind: Type of test. Known values are: "URL", "JMX", and "Locust". + :vartype kind: str or ~customizations.models.TestKind + :ivar request_data_level: Request data collection level for test run. Known values are: "NONE" + and "ERRORS". + :vartype request_data_level: str or ~customizations.models.RequestDataLevel + :ivar debug_logs_enabled: Enable or disable debug level logging. True if debug logs are enabled + for the test run. False otherwise. + :vartype debug_logs_enabled: bool + :ivar public_ip_disabled: Inject load test engines without deploying public IP for outbound + access. 
+ :vartype public_ip_disabled: bool + :ivar created_by_type: The type of the entity that created the test run. (E.x. User, + ScheduleTrigger, etc). Known values are: "User" and "ScheduledTrigger". + :vartype created_by_type: str or ~customizations.models.CreatedByType + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + test_run_id: str = rest_field(name="testRunId", visibility=["read"]) + """Unique test run identifier for the load test run, must contain only lower-case alphabetic, + numeric, underscore or hyphen characters. Required.""" + pass_fail_criteria: Optional["_models.PassFailCriteria"] = rest_field( + name="passFailCriteria", visibility=["read", "create", "update", "delete", "query"] + ) + """Pass fail criteria for a test.""" + auto_stop_criteria: Optional["_models.AutoStopCriteria"] = rest_field( + name="autoStopCriteria", visibility=["read", "create", "update", "delete", "query"] + ) + """Auto stop criteria for a test. This will automatically stop a load test if the error percentage + is high for a certain time window.""" + secrets: Optional[Dict[str, "_models.Secret"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Secrets can be stored in an Azure Key Vault or any other secret store. If the + secret is stored in an Azure Key Vault, the value should be the secret + identifier and the type should be AKV_SECRET_URI. 
If the secret is stored + elsewhere, the secret value should be provided directly and the type should be + SECRET_VALUE.""" + certificate: Optional["_models.CertificateMetadata"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Certificates metadata.""" + environment_variables: Optional[Dict[str, str]] = rest_field( + name="environmentVariables", visibility=["read", "create", "update", "delete", "query"] + ) + """Environment variables which are defined as a set of pairs.""" + error_details: Optional[List["_models.ErrorDetails"]] = rest_field(name="errorDetails", visibility=["read"]) + """Error details if there is any failure in load test run.""" + test_run_statistics: Optional[Dict[str, "_models.TestRunStatistics"]] = rest_field( + name="testRunStatistics", visibility=["read"] + ) + """Test run statistics. Key is the sampler name and value is the set of statistics for performance + metrics like response time, throughput, etc. from the load test run. + The sampler name is the same as the name mentioned in the test script. + Sampler name \"Total\" represents the aggregated statistics of all the samplers.""" + regional_statistics: Optional[Dict[str, "_models.TestRunStatistics"]] = rest_field( + name="regionalStatistics", visibility=["read"] + ) + """Regional statistics. Key is the Azure region name and value is the test run statistics. + The region name should of format accepted by ARM, and should be a region supported by Azure + Load Testing. For example, East US should be passed as \"eastus\". 
+ The region name must match one of the strings in the \"Name\" column returned from running the + \"az account list-locations -o table\" Azure CLI command.""" + load_test_configuration: Optional["_models.LoadTestConfiguration"] = rest_field( + name="loadTestConfiguration", visibility=["read"] + ) + """The load test configuration.""" + test_artifacts: Optional["_models.TestRunArtifacts"] = rest_field(name="testArtifacts", visibility=["read"]) + """Collection of test run artifacts.""" + test_result: Optional[Union[str, "_models.PassFailTestResult"]] = rest_field(name="testResult", visibility=["read"]) + """Test result for pass/Fail criteria used during the test run. Known values are: \"PASSED\", + \"NOT_APPLICABLE\", and \"FAILED\".""" + virtual_users: Optional[int] = rest_field(name="virtualUsers", visibility=["read"]) + """Number of virtual users, for which test has been run.""" + display_name: Optional[str] = rest_field( + name="displayName", visibility=["read", "create", "update", "delete", "query"] + ) + """Display name of a testRun.""" + test_id: Optional[str] = rest_field(name="testId", visibility=["read", "create", "update", "delete", "query"]) + """Associated test Id.""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The test run description.""" + status: Optional[Union[str, "_models.TestRunStatus"]] = rest_field(visibility=["read"]) + """The test run status. 
Known values are: \"ACCEPTED\", \"NOTSTARTED\", \"PROVISIONING\", + \"PROVISIONED\", \"CONFIGURING\", \"CONFIGURED\", \"EXECUTING\", \"EXECUTED\", + \"DEPROVISIONING\", \"DEPROVISIONED\", \"DONE\", \"CANCELLING\", \"CANCELLED\", \"FAILED\", + \"VALIDATION_SUCCESS\", and \"VALIDATION_FAILURE\".""" + start_date_time: Optional[datetime.datetime] = rest_field( + name="startDateTime", visibility=["read"], format="rfc3339" + ) + """The test run start DateTime(RFC 3339 literal format).""" + end_date_time: Optional[datetime.datetime] = rest_field(name="endDateTime", visibility=["read"], format="rfc3339") + """The test run end DateTime(RFC 3339 literal format).""" + executed_date_time: Optional[datetime.datetime] = rest_field( + name="executedDateTime", visibility=["read"], format="rfc3339" + ) + """Test run initiated time.""" + portal_url: Optional[str] = rest_field(name="portalUrl", visibility=["read"]) + """Portal url.""" + duration: Optional[int] = rest_field(visibility=["read"]) + """Test run duration in milliseconds.""" + virtual_user_hours: Optional[float] = rest_field(name="virtualUserHours", visibility=["read"]) + """Virtual user hours consumed by the test run.""" + subnet_id: Optional[str] = rest_field(name="subnetId", visibility=["read"]) + """Subnet ID on which the load test instances should run.""" + kind: Optional[Union[str, "_models.TestKind"]] = rest_field(visibility=["read"]) + """Type of test. Known values are: \"URL\", \"JMX\", and \"Locust\".""" + request_data_level: Optional[Union[str, "_models.RequestDataLevel"]] = rest_field( + name="requestDataLevel", visibility=["read", "create", "update", "delete", "query"] + ) + """Request data collection level for test run. Known values are: \"NONE\" and \"ERRORS\".""" + debug_logs_enabled: Optional[bool] = rest_field( + name="debugLogsEnabled", visibility=["read", "create", "update", "delete", "query"] + ) + """Enable or disable debug level logging. True if debug logs are enabled for the test run. 
False + otherwise.""" + public_ip_disabled: Optional[bool] = rest_field(name="publicIPDisabled", visibility=["read"]) + """Inject load test engines without deploying public IP for outbound access.""" + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="createdByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of the entity that created the test run. (E.x. User, ScheduleTrigger, etc). Known + values are: \"User\" and \"ScheduledTrigger\".""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( # pylint: disable=too-many-locals + self, + *, + pass_fail_criteria: Optional["_models.PassFailCriteria"] = None, + auto_stop_criteria: Optional["_models.AutoStopCriteria"] = None, + secrets: Optional[Dict[str, "_models.Secret"]] = None, + certificate: Optional["_models.CertificateMetadata"] = None, + environment_variables: Optional[Dict[str, str]] = None, + display_name: Optional[str] = None, + test_id: Optional[str] = None, + description: Optional[str] = None, + request_data_level: Optional[Union[str, "_models.RequestDataLevel"]] = None, + debug_logs_enabled: Optional[bool] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunAppComponents(_model_base.Model): + """Test run app component. + + :ivar components: Azure resource collection { resource id (fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}) + : resource object }. Required. + :vartype components: dict[str, ~customizations.models.AppComponent] + :ivar test_run_id: Test run identifier. + :vartype test_run_id: str + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + components: Dict[str, "_models.AppComponent"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Azure resource collection { resource id (fully qualified resource Id e.g + subscriptions/{subId}/resourceGroups/{rg}/providers/Microsoft.LoadTestService/loadtests/{resName}) + : resource object }. 
Required.""" + test_run_id: Optional[str] = rest_field(name="testRunId", visibility=["read"]) + """Test run identifier.""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( + self, + *, + components: Dict[str, "_models.AppComponent"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunArtifacts(_model_base.Model): + """Collection of test run artifacts. + + :ivar input_artifacts: The input artifacts for the test run. + :vartype input_artifacts: ~customizations.models.TestRunInputArtifacts + :ivar output_artifacts: The output artifacts for the test run. 
+ :vartype output_artifacts: ~customizations.models.TestRunOutputArtifacts + """ + + input_artifacts: Optional["_models.TestRunInputArtifacts"] = rest_field(name="inputArtifacts", visibility=["read"]) + """The input artifacts for the test run.""" + output_artifacts: Optional["_models.TestRunOutputArtifacts"] = rest_field( + name="outputArtifacts", visibility=["read", "create", "update", "delete", "query"] + ) + """The output artifacts for the test run.""" + + @overload + def __init__( + self, + *, + output_artifacts: Optional["_models.TestRunOutputArtifacts"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunDetail(_model_base.Model): + """Details of a particular test run for a test profile run. + + :ivar status: Status of the test run. Required. Known values are: "ACCEPTED", "NOTSTARTED", + "PROVISIONING", "PROVISIONED", "CONFIGURING", "CONFIGURED", "EXECUTING", "EXECUTED", + "DEPROVISIONING", "DEPROVISIONED", "DONE", "CANCELLING", "CANCELLED", "FAILED", + "VALIDATION_SUCCESS", and "VALIDATION_FAILURE". + :vartype status: str or ~customizations.models.TestRunStatus + :ivar configuration_id: ID of the configuration on which the test ran. Required. + :vartype configuration_id: str + :ivar properties: Key value pair of extra properties associated with the test run. Required. + :vartype properties: dict[str, str] + """ + + status: Union[str, "_models.TestRunStatus"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Status of the test run. Required. 
Known values are: \"ACCEPTED\", \"NOTSTARTED\", + \"PROVISIONING\", \"PROVISIONED\", \"CONFIGURING\", \"CONFIGURED\", \"EXECUTING\", + \"EXECUTED\", \"DEPROVISIONING\", \"DEPROVISIONED\", \"DONE\", \"CANCELLING\", \"CANCELLED\", + \"FAILED\", \"VALIDATION_SUCCESS\", and \"VALIDATION_FAILURE\".""" + configuration_id: str = rest_field( + name="configurationId", visibility=["read", "create", "update", "delete", "query"] + ) + """ID of the configuration on which the test ran. Required.""" + properties: Dict[str, str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Key value pair of extra properties associated with the test run. Required.""" + + @overload + def __init__( + self, + *, + status: Union[str, "_models.TestRunStatus"], + configuration_id: str, + properties: Dict[str, str], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunFileInfo(_model_base.Model): + """Test run file info. + + :ivar file_name: Name of the file. Required. + :vartype file_name: str + :ivar url: File URL. + :vartype url: str + :ivar file_type: File type. Known values are: "JMX_FILE", "USER_PROPERTIES", + "ADDITIONAL_ARTIFACTS", "ZIPPED_ARTIFACTS", "URL_TEST_CONFIG", and "TEST_SCRIPT". + :vartype file_type: str or ~customizations.models.FileType + :ivar expire_date_time: Expiry time of the file (RFC 3339 literal format). + :vartype expire_date_time: ~datetime.datetime + :ivar validation_status: Validation status of the file. Known values are: "NOT_VALIDATED", + "VALIDATION_SUCCESS", "VALIDATION_FAILURE", "VALIDATION_INITIATED", and + "VALIDATION_NOT_REQUIRED". + :vartype validation_status: str or ~customizations.models.FileValidationStatus + :ivar validation_failure_details: Validation failure error details. 
+ :vartype validation_failure_details: str + """ + + file_name: str = rest_field(name="fileName", visibility=["read", "create", "update", "delete", "query"]) + """Name of the file. Required.""" + url: Optional[str] = rest_field(visibility=["read"]) + """File URL.""" + file_type: Optional[Union[str, "_models.FileType"]] = rest_field(name="fileType", visibility=["read"]) + """File type. Known values are: \"JMX_FILE\", \"USER_PROPERTIES\", \"ADDITIONAL_ARTIFACTS\", + \"ZIPPED_ARTIFACTS\", \"URL_TEST_CONFIG\", and \"TEST_SCRIPT\".""" + expire_date_time: Optional[datetime.datetime] = rest_field( + name="expireDateTime", visibility=["read"], format="rfc3339" + ) + """Expiry time of the file (RFC 3339 literal format).""" + validation_status: Optional[Union[str, "_models.FileValidationStatus"]] = rest_field( + name="validationStatus", visibility=["read"] + ) + """Validation status of the file. Known values are: \"NOT_VALIDATED\", \"VALIDATION_SUCCESS\", + \"VALIDATION_FAILURE\", \"VALIDATION_INITIATED\", and \"VALIDATION_NOT_REQUIRED\".""" + validation_failure_details: Optional[str] = rest_field(name="validationFailureDetails", visibility=["read"]) + """Validation failure error details.""" + + @overload + def __init__( + self, + *, + file_name: str, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunInputArtifacts(_model_base.Model): + """The input artifacts for the test run. + + :ivar config_file_info: The load test YAML file that contains the the test configuration. + :vartype config_file_info: ~customizations.models.TestRunFileInfo + :ivar test_script_file_info: The test script file for the test run. 
+ :vartype test_script_file_info: ~customizations.models.TestRunFileInfo + :ivar user_prop_file_info: The user properties file. + :vartype user_prop_file_info: ~customizations.models.TestRunFileInfo + :ivar input_artifacts_zip_file_info: The zip file for all input artifacts. + :vartype input_artifacts_zip_file_info: ~customizations.models.TestRunFileInfo + :ivar url_test_config_file_info: The config json file for url based test. + :vartype url_test_config_file_info: ~customizations.models.TestRunFileInfo + :ivar additional_file_info: Additional supported files for the test run. + :vartype additional_file_info: list[~customizations.models.TestRunFileInfo] + """ + + config_file_info: Optional["_models.TestRunFileInfo"] = rest_field( + name="configFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The load test YAML file that contains the the test configuration.""" + test_script_file_info: Optional["_models.TestRunFileInfo"] = rest_field( + name="testScriptFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The test script file for the test run.""" + user_prop_file_info: Optional["_models.TestRunFileInfo"] = rest_field( + name="userPropFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The user properties file.""" + input_artifacts_zip_file_info: Optional["_models.TestRunFileInfo"] = rest_field( + name="inputArtifactsZipFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The zip file for all input artifacts.""" + url_test_config_file_info: Optional["_models.TestRunFileInfo"] = rest_field( + name="urlTestConfigFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The config json file for url based test.""" + additional_file_info: Optional[List["_models.TestRunFileInfo"]] = rest_field( + name="additionalFileInfo", visibility=["read"] + ) + """Additional supported files for the test run.""" + + @overload + def __init__( + self, + *, + 
config_file_info: Optional["_models.TestRunFileInfo"] = None, + test_script_file_info: Optional["_models.TestRunFileInfo"] = None, + user_prop_file_info: Optional["_models.TestRunFileInfo"] = None, + input_artifacts_zip_file_info: Optional["_models.TestRunFileInfo"] = None, + url_test_config_file_info: Optional["_models.TestRunFileInfo"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunOutputArtifacts(_model_base.Model): + """The output artifacts for the test run. + + :ivar result_file_info: The test run results file. + :vartype result_file_info: ~customizations.models.TestRunFileInfo + :ivar logs_file_info: The test run report with metrics. + :vartype logs_file_info: ~customizations.models.TestRunFileInfo + :ivar artifacts_container_info: The container for test run artifacts. + :vartype artifacts_container_info: ~customizations.models.ArtifactsContainerInfo + :ivar report_file_info: The report file for the test run. 
+ :vartype report_file_info: ~customizations.models.TestRunFileInfo + """ + + result_file_info: Optional["_models.TestRunFileInfo"] = rest_field( + name="resultFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The test run results file.""" + logs_file_info: Optional["_models.TestRunFileInfo"] = rest_field( + name="logsFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The test run report with metrics.""" + artifacts_container_info: Optional["_models.ArtifactsContainerInfo"] = rest_field( + name="artifactsContainerInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The container for test run artifacts.""" + report_file_info: Optional["_models.TestRunFileInfo"] = rest_field( + name="reportFileInfo", visibility=["read", "create", "update", "delete", "query"] + ) + """The report file for the test run.""" + + @overload + def __init__( + self, + *, + result_file_info: Optional["_models.TestRunFileInfo"] = None, + logs_file_info: Optional["_models.TestRunFileInfo"] = None, + artifacts_container_info: Optional["_models.ArtifactsContainerInfo"] = None, + report_file_info: Optional["_models.TestRunFileInfo"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunServerMetricsConfiguration(_model_base.Model): + """Test run server metrics configuration. + + :ivar test_run_id: Test run identifier. + :vartype test_run_id: str + :ivar metrics: Azure resource metrics collection {metric id : metrics object} (Refer : + `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition + `_ + for metric id). 
+ :vartype metrics: dict[str, ~customizations.models.ResourceMetric] + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + test_run_id: Optional[str] = rest_field(name="testRunId", visibility=["read"]) + """Test run identifier.""" + metrics: Optional[Dict[str, "_models.ResourceMetric"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Azure resource metrics collection {metric id : metrics object} (Refer : + `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition + `_ + for metric id).""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( + self, + *, + metrics: Optional[Dict[str, "_models.ResourceMetric"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TestRunStatistics(_model_base.Model): + """Test run statistics. + + :ivar transaction: Transaction name. + :vartype transaction: str + :ivar sample_count: Sampler count. + :vartype sample_count: float + :ivar error_count: Error count. + :vartype error_count: float + :ivar error_pct: Error percentage. + :vartype error_pct: float + :ivar mean_res_time: Mean response time. + :vartype mean_res_time: float + :ivar median_res_time: Median response time. + :vartype median_res_time: float + :ivar max_res_time: Max response time. + :vartype max_res_time: float + :ivar min_res_time: Minimum response time. + :vartype min_res_time: float + :ivar pct1_res_time: 90 percentile response time. + :vartype pct1_res_time: float + :ivar pct2_res_time: 95 percentile response time. + :vartype pct2_res_time: float + :ivar pct3_res_time: 99 percentile response time. + :vartype pct3_res_time: float + :ivar pct75_res_time: 75 percentile response time. + :vartype pct75_res_time: float + :ivar pct96_res_time: 96 percentile response time. + :vartype pct96_res_time: float + :ivar pct97_res_time: 97 percentile response time. + :vartype pct97_res_time: float + :ivar pct98_res_time: 98 percentile response time. + :vartype pct98_res_time: float + :ivar pct999_res_time: 99.9 percentile response time. + :vartype pct999_res_time: float + :ivar pct9999_res_time: 99.99 percentile response time. + :vartype pct9999_res_time: float + :ivar throughput: Throughput. + :vartype throughput: float + :ivar received_k_bytes_per_sec: Received network bytes. + :vartype received_k_bytes_per_sec: float + :ivar sent_k_bytes_per_sec: Send network bytes. 
+ :vartype sent_k_bytes_per_sec: float + """ + + transaction: Optional[str] = rest_field(visibility=["read"]) + """Transaction name.""" + sample_count: Optional[float] = rest_field(name="sampleCount", visibility=["read"]) + """Sampler count.""" + error_count: Optional[float] = rest_field(name="errorCount", visibility=["read"]) + """Error count.""" + error_pct: Optional[float] = rest_field(name="errorPct", visibility=["read"]) + """Error percentage.""" + mean_res_time: Optional[float] = rest_field(name="meanResTime", visibility=["read"]) + """Mean response time.""" + median_res_time: Optional[float] = rest_field(name="medianResTime", visibility=["read"]) + """Median response time.""" + max_res_time: Optional[float] = rest_field(name="maxResTime", visibility=["read"]) + """Max response time.""" + min_res_time: Optional[float] = rest_field(name="minResTime", visibility=["read"]) + """Minimum response time.""" + pct1_res_time: Optional[float] = rest_field(name="pct1ResTime", visibility=["read"]) + """90 percentile response time.""" + pct2_res_time: Optional[float] = rest_field(name="pct2ResTime", visibility=["read"]) + """95 percentile response time.""" + pct3_res_time: Optional[float] = rest_field(name="pct3ResTime", visibility=["read"]) + """99 percentile response time.""" + pct75_res_time: Optional[float] = rest_field(name="pct75ResTime", visibility=["read"]) + """75 percentile response time.""" + pct96_res_time: Optional[float] = rest_field(name="pct96ResTime", visibility=["read"]) + """96 percentile response time.""" + pct97_res_time: Optional[float] = rest_field(name="pct97ResTime", visibility=["read"]) + """97 percentile response time.""" + pct98_res_time: Optional[float] = rest_field(name="pct98ResTime", visibility=["read"]) + """98 percentile response time.""" + pct999_res_time: Optional[float] = rest_field(name="pct999ResTime", visibility=["read"]) + """99.9 percentile response time.""" + pct9999_res_time: Optional[float] = rest_field(name="pct9999ResTime", 
visibility=["read"]) + """99.99 percentile response time.""" + throughput: Optional[float] = rest_field(visibility=["read"]) + """Throughput.""" + received_k_bytes_per_sec: Optional[float] = rest_field(name="receivedKBytesPerSec", visibility=["read"]) + """Received network bytes.""" + sent_k_bytes_per_sec: Optional[float] = rest_field(name="sentKBytesPerSec", visibility=["read"]) + """Send network bytes.""" + + +class TestServerMetricsConfiguration(_model_base.Model): + """Test server metrics configuration. + + :ivar test_id: Test identifier. + :vartype test_id: str + :ivar metrics: Azure resource metrics collection {metric id : metrics object} (Refer : + `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition + `_ + for metric id). Required. + :vartype metrics: dict[str, ~customizations.models.ResourceMetric] + :ivar created_date_time: The creation datetime(RFC 3339 literal format). + :vartype created_date_time: ~datetime.datetime + :ivar created_by: The user that created. + :vartype created_by: str + :ivar last_modified_date_time: The last Modified datetime(RFC 3339 literal format). + :vartype last_modified_date_time: ~datetime.datetime + :ivar last_modified_by: The user that last modified. + :vartype last_modified_by: str + """ + + test_id: Optional[str] = rest_field(name="testId", visibility=["read"]) + """Test identifier.""" + metrics: Dict[str, "_models.ResourceMetric"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Azure resource metrics collection {metric id : metrics object} (Refer : + `https://learn.microsoft.com/en-us/rest/api/monitor/metric-definitions/list#metricdefinition + `_ + for metric id). 
Required.""" + created_date_time: Optional[datetime.datetime] = rest_field( + name="createdDateTime", visibility=["read"], format="rfc3339" + ) + """The creation datetime(RFC 3339 literal format).""" + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read"]) + """The user that created.""" + last_modified_date_time: Optional[datetime.datetime] = rest_field( + name="lastModifiedDateTime", visibility=["read"], format="rfc3339" + ) + """The last Modified datetime(RFC 3339 literal format).""" + last_modified_by: Optional[str] = rest_field(name="lastModifiedBy", visibility=["read"]) + """The user that last modified.""" + + @overload + def __init__( + self, + *, + metrics: Dict[str, "_models.ResourceMetric"], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class TimeSeriesElement(_model_base.Model): + """The time series returned when a data query is performed. + + :ivar data: An array of data points representing the metric values. + :vartype data: list[~customizations.models.MetricValue] + :ivar dimension_values: The dimension values. + :vartype dimension_values: list[~customizations.models.DimensionValue] + """ + + data: Optional[List["_models.MetricValue"]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """An array of data points representing the metric values.""" + dimension_values: Optional[List["_models.DimensionValue"]] = rest_field( + name="dimensionValues", visibility=["read", "create", "update", "delete", "query"] + ) + """The dimension values.""" + + @overload + def __init__( + self, + *, + data: Optional[List["_models.MetricValue"]] = None, + dimension_values: Optional[List["_models.DimensionValue"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_patch.py b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_patch.py new file mode 100644 index 000000000000..8bcb627aa475 --- /dev/null +++ b/sdk/loadtesting/azure-developer-loadtesting/microsoft/loadtestservice/models/_patch.py @@ -0,0 +1,21 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_app_components_test.py b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_app_components_test.py index 79adb48c202b..8cfc28a1de29 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_app_components_test.py +++ b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_app_components_test.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for diff --git a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test.py b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test.py index 36b793be030d..402294d5b89d 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test.py +++ b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. 
See License.txt in the project root for diff --git a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test_profile.py b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test_profile.py index 6d19b26a3aca..b49976147e7c 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test_profile.py +++ b/sdk/loadtesting/azure-developer-loadtesting/samples/create_or_update_test_profile.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for @@ -59,9 +60,9 @@ "config2": { "instanceMemoryMB": 4096, "httpConcurrency": 100, - } - } - } + }, + }, + }, }, ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/samples/upload_test_file.py b/sdk/loadtesting/azure-developer-loadtesting/samples/upload_test_file.py index ed8380e6ca39..1c8c7fbe5d0e 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/samples/upload_test_file.py +++ b/sdk/loadtesting/azure-developer-loadtesting/samples/upload_test_file.py @@ -1,3 +1,4 @@ +# pylint: disable=line-too-long,useless-suppression # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for diff --git a/sdk/loadtesting/azure-developer-loadtesting/setup.py b/sdk/loadtesting/azure-developer-loadtesting/setup.py index 0d0752f9040b..258e59b98537 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/setup.py +++ b/sdk/loadtesting/azure-developer-loadtesting/setup.py @@ -5,7 +5,7 @@ # Code generated by Microsoft (R) Python Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -# coding: utf-8 + import os import re @@ -29,7 +29,7 @@ setup( name=PACKAGE_NAME, version=version, - description="Microsoft {} Client Library for Python".format(PACKAGE_PPRINT_NAME), + description="Microsoft Corporation {} Client Library for Python".format(PACKAGE_PPRINT_NAME), long_description=open("README.md", "r").read(), long_description_content_type="text/markdown", license="MIT License", @@ -60,7 +60,7 @@ ), include_package_data=True, package_data={ - "azure.developer.loadtesting": ["py.typed"], + "customizations": ["py.typed"], }, install_requires=[ "isodate>=0.6.1", diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_administration_ops.py b/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_administration_ops.py index 655cfce51f17..69232f7a0183 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_administration_ops.py +++ b/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_administration_ops.py @@ -16,6 +16,7 @@ DISPLAY_NAME = "TestingResourcePyTest" + class TestLoadTestAdministrationOperations(LoadTestingAsyncTest): @LoadTestingPreparer() @@ -36,8 +37,18 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -49,7 +60,8 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, 
"secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None await self.close_admin_client() @@ -137,7 +149,7 @@ async def test_delete_test_file(self, loadtesting_endpoint, loadtesting_test_id) assert result is None await self.close_admin_client() - + @LoadTestingPreparer() @recorded_by_proxy_async @pytest.mark.asyncio @@ -230,6 +242,7 @@ async def test_delete_load_test(self, loadtesting_endpoint, loadtesting_test_id) await self.close_admin_client() + class TestTestProfileAdministrationOperations(LoadTestingAsyncTest): @LoadTestingPreparer() @@ -250,8 +263,18 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -263,7 +286,8 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None await self.close_admin_client() @@ -283,7 +307,9 @@ async def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id): @LoadTestingPreparer() @recorded_by_proxy_async @pytest.mark.asyncio - async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id): + async def test_create_or_update_test_profile( + self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id + ): set_bodiless_matcher() 
client = self.create_administration_client(loadtesting_endpoint) @@ -297,16 +323,10 @@ async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtes "targetResourceConfigurations": { "kind": "FunctionsFlexConsumption", "configurations": { - "config1": { - "instanceMemoryMB": 2048, - "httpConcurrency": 20 - }, - "config2": { - "instanceMemoryMB": 4096, - "httpConcurrency": 100 - }, - } - } + "config1": {"instanceMemoryMB": 2048, "httpConcurrency": 20}, + "config2": {"instanceMemoryMB": 4096, "httpConcurrency": 100}, + }, + }, }, ) @@ -318,7 +338,7 @@ async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtes @pytest.mark.asyncio async def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id): set_bodiless_matcher() - + client = self.create_administration_client(loadtesting_endpoint) result = await client.get_test_profile(loadtesting_test_profile_id) assert result is not None @@ -330,7 +350,7 @@ async def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_pro @pytest.mark.asyncio async def test_list_test_profiles(self, loadtesting_endpoint): set_bodiless_matcher() - + client = self.create_administration_client(loadtesting_endpoint) result = client.list_test_profiles() assert result is not None diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_run_ops.py b/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_run_ops.py index a35c670296df..c00da04555ee 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_run_ops.py +++ b/sdk/loadtesting/azure-developer-loadtesting/tests/test_async_load_test_run_ops.py @@ -17,6 +17,7 @@ DISPLAY_NAME = "TestingResourcePyTest" NON_EXISTING_RESOURCE = "nonexistingresource" + class TestLoadTestRunOperations(LoadTestingAsyncTest): # Pre-requisite: Test creation is needed for test run related tests @@ -37,8 +38,18 @@ async def test_create_or_update_load_test(self, 
loadtesting_endpoint, loadtestin }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -50,7 +61,8 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None @@ -66,7 +78,7 @@ async def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id): assert result is not None await self.close_admin_client() - + @LoadTestingPreparer() @recorded_by_proxy_async async def test_upload_test_file(self, loadtesting_endpoint, loadtesting_test_id): @@ -154,7 +166,7 @@ async def test_list_test_runs(self, loadtesting_endpoint): result = run_client.list_test_runs() assert result is not None items = [item async for item in result] - assert len(items) > 0 # Atleast one item in the page + assert len(items) > 0 # Atleast one item in the page await self.close_run_client() @@ -217,9 +229,7 @@ async def test_create_or_update_app_component( @LoadTestingPreparer() @recorded_by_proxy_async - async def test_get_app_component( - self, loadtesting_endpoint, loadtesting_test_run_id - ): + async def test_get_app_component(self, loadtesting_endpoint, loadtesting_test_run_id): set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) @@ -260,9 +270,7 @@ async def test_create_or_update_server_metrics_config( @LoadTestingPreparer() @recorded_by_proxy_async - async def test_get_server_metrics_config( - self, loadtesting_endpoint, 
loadtesting_test_run_id - ): + async def test_get_server_metrics_config(self, loadtesting_endpoint, loadtesting_test_run_id): set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) @@ -302,7 +310,7 @@ async def test_delete_test_run(self, loadtesting_endpoint, loadtesting_test_run_ assert result is None await self.close_run_client() - + @LoadTestingPreparer() @recorded_by_proxy_async async def test_delete_test(self, loadtesting_endpoint, loadtesting_test_id): @@ -315,6 +323,7 @@ async def test_delete_test(self, loadtesting_endpoint, loadtesting_test_id): await self.close_admin_client() + class TestTestProfileRunOperations(LoadTestingAsyncTest): # Pre-requisite: Test & Test Profile creation is needed for test profile run related tests @@ -335,8 +344,18 @@ async def test_create_or_update_load_test(self, loadtesting_endpoint, loadtestin }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -398,7 +417,9 @@ async def test_get_test_file(self, loadtesting_endpoint, loadtesting_test_id): @LoadTestingPreparer() @recorded_by_proxy_async - async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id): + async def test_create_or_update_test_profile( + self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id + ): set_bodiless_matcher() client = self.create_administration_client(loadtesting_endpoint) @@ -412,16 +433,10 @@ 
async def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtes "targetResourceConfigurations": { "kind": "FunctionsFlexConsumption", "configurations": { - "config1": { - "instanceMemoryMB": 2048, - "httpConcurrency": 20 - }, - "config2": { - "instanceMemoryMB": 4096, - "httpConcurrency": 100 - }, - } - } + "config1": {"instanceMemoryMB": 2048, "httpConcurrency": 20}, + "config2": {"instanceMemoryMB": 4096, "httpConcurrency": 100}, + }, + }, }, ) assert result is not None @@ -441,7 +456,9 @@ async def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_pro @LoadTestingPreparer() @recorded_by_proxy_async - async def test_begin_test_profile_run(self, loadtesting_endpoint, loadtesting_test_profile_id, loadtesting_test_profile_run_id): + async def test_begin_test_profile_run( + self, loadtesting_endpoint, loadtesting_test_profile_id, loadtesting_test_profile_run_id + ): set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_administration_ops.py b/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_administration_ops.py index 22b217d8200e..25ee309e1019 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_administration_ops.py +++ b/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_administration_ops.py @@ -14,6 +14,7 @@ DISPLAY_NAME = "TestingResourcePyTest" + class TestLoadTestAdministrationOperations(LoadTestingTest): @LoadTestingPreparer() @@ -33,8 +34,18 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": 
">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -46,7 +57,8 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None @@ -110,7 +122,8 @@ def list_test_files(self, loadtesting_endpoint, loadtesting_test_id): @LoadTestingPreparer() @recorded_by_proxy def test_create_or_update_app_components( - self, loadtesting_endpoint, loadtesting_test_id, loadtesting_app_component_id): + self, loadtesting_endpoint, loadtesting_test_id, loadtesting_app_component_id + ): set_bodiless_matcher() client = self.create_administration_client(loadtesting_endpoint) @@ -181,7 +194,7 @@ def test_delete_test_file(self, loadtesting_endpoint, loadtesting_test_id): client = self.create_administration_client(loadtesting_endpoint) result = client.delete_test_file(loadtesting_test_id, "sample.jmx") assert result is None - + @LoadTestingPreparer() @recorded_by_proxy def test_delete_load_test(self, loadtesting_endpoint, loadtesting_test_id): @@ -191,6 +204,7 @@ def test_delete_load_test(self, loadtesting_endpoint, loadtesting_test_id): result = client.delete_test(loadtesting_test_id) assert result is None + class TestTestProfileAdministrationOperations(LoadTestingTest): # Pre-requisite: Test creation is needed for test profile related tests @@ -211,8 +225,18 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + 
"condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -224,7 +248,8 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None @@ -239,7 +264,9 @@ def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id): @LoadTestingPreparer() @recorded_by_proxy - def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id): + def test_create_or_update_test_profile( + self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id + ): set_bodiless_matcher() client = self.create_administration_client(loadtesting_endpoint) @@ -253,20 +280,14 @@ def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_t "targetResourceConfigurations": { "kind": "FunctionsFlexConsumption", "configurations": { - "config1": { - "instanceMemoryMB": 2048, - "httpConcurrency": 20 - }, - "config2": { - "instanceMemoryMB": 4096, - "httpConcurrency": 100 - }, - } - } + "config1": {"instanceMemoryMB": 2048, "httpConcurrency": 20}, + "config2": {"instanceMemoryMB": 4096, "httpConcurrency": 100}, + }, + }, }, ) assert result is not None - + @LoadTestingPreparer() @recorded_by_proxy def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id): @@ -286,7 +307,7 @@ def test_list_test_profiles(self, loadtesting_endpoint): assert result is not None items = [r for r in result] assert len(items) > 0 # page has atleast one item - + @LoadTestingPreparer() @recorded_by_proxy def test_delete_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id): diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_run_ops.py 
b/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_run_ops.py index be5b5d5efa29..ab60ea9c712b 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_run_ops.py +++ b/sdk/loadtesting/azure-developer-loadtesting/tests/test_load_test_run_ops.py @@ -35,8 +35,18 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -48,7 +58,8 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "secrets": {}, "environmentVariables": {"my-variable": "value"}, - }) + }, + ) assert result is not None @@ -60,7 +71,7 @@ def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id): client = self.create_administration_client(loadtesting_endpoint) result = client.get_test(loadtesting_test_id) assert result is not None - + @LoadTestingPreparer() @recorded_by_proxy def test_upload_test_file(self, loadtesting_endpoint, loadtesting_test_id): @@ -138,7 +149,7 @@ def test_list_test_runs(self, loadtesting_endpoint, loadtesting_test_id, loadtes result = run_client.list_test_runs() assert result is not None items = [item for item in result] - assert len(items) > 0 # Atleast one item in the page + assert len(items) > 0 # Atleast one item in the page @LoadTestingPreparer() @recorded_by_proxy @@ -195,9 +206,7 @@ def test_create_or_update_app_component( @LoadTestingPreparer() @recorded_by_proxy - def test_get_app_component( - self, 
loadtesting_endpoint, loadtesting_test_run_id - ): + def test_get_app_component(self, loadtesting_endpoint, loadtesting_test_run_id): set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) @@ -234,9 +243,7 @@ def test_create_or_update_server_metrics_config( @LoadTestingPreparer() @recorded_by_proxy - def test_get_server_metrics_config( - self, loadtesting_endpoint, loadtesting_test_run_id - ): + def test_get_server_metrics_config(self, loadtesting_endpoint, loadtesting_test_run_id): set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) @@ -275,7 +282,7 @@ def test_delete_test_run(self, loadtesting_endpoint, loadtesting_test_run_id): result = run_client.delete_test_run(loadtesting_test_run_id) assert result is None - + @LoadTestingPreparer() @recorded_by_proxy def test_delete_test(self, loadtesting_endpoint, loadtesting_test_id): @@ -286,6 +293,7 @@ def test_delete_test(self, loadtesting_endpoint, loadtesting_test_id): result = client.delete_test(loadtesting_test_id) assert result is None + class TestTestProfileRunOperations(LoadTestingTest): # Pre-requisite: Test & Test Profile creation is needed for test profile run related tests @@ -306,8 +314,18 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "passFailCriteria": { "passFailMetrics": { - "condition1": {"clientmetric": "response_time_ms", "aggregate": "avg", "condition": ">", "value": 300}, - "condition2": {"clientmetric": "error", "aggregate": "percentage", "condition": ">", "value": 50}, + "condition1": { + "clientmetric": "response_time_ms", + "aggregate": "avg", + "condition": ">", + "value": 300, + }, + "condition2": { + "clientmetric": "error", + "aggregate": "percentage", + "condition": ">", + "value": 50, + }, "condition3": { "clientmetric": "latency", "aggregate": "avg", @@ -319,7 +337,8 @@ def test_create_or_update_load_test(self, loadtesting_endpoint, loadtesting_test }, "secrets": {}, "environmentVariables": 
{"my-variable": "value"}, - }) + }, + ) assert result is not None @@ -331,7 +350,7 @@ def test_get_load_test(self, loadtesting_endpoint, loadtesting_test_id): client = self.create_administration_client(loadtesting_endpoint) result = client.get_test(loadtesting_test_id) assert result is not None - + @LoadTestingPreparer() @recorded_by_proxy def test_upload_test_file(self, loadtesting_endpoint, loadtesting_test_id): @@ -360,7 +379,9 @@ def test_get_test_file(self, loadtesting_endpoint, loadtesting_test_id): @LoadTestingPreparer() @recorded_by_proxy - def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id): + def test_create_or_update_test_profile( + self, loadtesting_endpoint, loadtesting_test_id, loadtesting_test_profile_id, loadtesting_target_resource_id + ): set_bodiless_matcher() client = self.create_administration_client(loadtesting_endpoint) @@ -374,20 +395,14 @@ def test_create_or_update_test_profile(self, loadtesting_endpoint, loadtesting_t "targetResourceConfigurations": { "kind": "FunctionsFlexConsumption", "configurations": { - "config1": { - "instanceMemoryMB": 2048, - "httpConcurrency": 20 - }, - "config2": { - "instanceMemoryMB": 4096, - "httpConcurrency": 100 - }, - } - } + "config1": {"instanceMemoryMB": 2048, "httpConcurrency": 20}, + "config2": {"instanceMemoryMB": 4096, "httpConcurrency": 100}, + }, + }, }, ) assert result is not None - + @LoadTestingPreparer() @recorded_by_proxy def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id): @@ -399,7 +414,9 @@ def test_get_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_i @LoadTestingPreparer() @recorded_by_proxy - def test_begin_test_profile_run(self, loadtesting_endpoint, loadtesting_test_profile_id, loadtesting_test_profile_run_id): + def test_begin_test_profile_run( + self, loadtesting_endpoint, loadtesting_test_profile_id, loadtesting_test_profile_run_id + ): 
set_bodiless_matcher() run_client = self.create_run_client(loadtesting_endpoint) @@ -428,7 +445,7 @@ def test_get_test_profile_run(self, loadtesting_endpoint, loadtesting_test_profi result = run_client.get_test_profile_run(loadtesting_test_profile_run_id) assert result is not None assert len(result["recommendations"]) > 0 - + @LoadTestingPreparer() @recorded_by_proxy def test_stop_test_profile_run(self, loadtesting_endpoint, loadtesting_test_profile_id): @@ -460,7 +477,7 @@ def test_delete_test_profile_run(self, loadtesting_endpoint, loadtesting_test_pr result = run_client.delete_test_profile_run(loadtesting_test_profile_run_id) assert result is None - + @LoadTestingPreparer() @recorded_by_proxy def test_delete_test_profile(self, loadtesting_endpoint, loadtesting_test_profile_id): diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/testcase.py b/sdk/loadtesting/azure-developer-loadtesting/tests/testcase.py index 08c5ecb92644..df4dc143934d 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/testcase.py +++ b/sdk/loadtesting/azure-developer-loadtesting/tests/testcase.py @@ -38,5 +38,5 @@ def create_run_client(self, endpoint) -> LoadTestRunClient: loadtesting_app_component_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRG/providers/Microsoft.Web/sites/contoso-sampleapp", loadtesting_test_profile_id="some-test-profile-id", loadtesting_target_resource_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRG/providers/Microsoft.Web/sites/myFlexFunction", - loadtesting_test_profile_run_id="some-test-profile-run-id" + loadtesting_test_profile_run_id="some-test-profile-run-id", ) diff --git a/sdk/loadtesting/azure-developer-loadtesting/tests/testcase_async.py b/sdk/loadtesting/azure-developer-loadtesting/tests/testcase_async.py index e7ad3bfcd12c..7e256725c64a 100644 --- a/sdk/loadtesting/azure-developer-loadtesting/tests/testcase_async.py +++ 
b/sdk/loadtesting/azure-developer-loadtesting/tests/testcase_async.py @@ -16,7 +16,7 @@ def create_administration_client(self, endpoint) -> LoadTestAdministrationClient credential=self.admin_credential, endpoint=endpoint, ) - + return self.admin_client def create_run_client(self, endpoint) -> LoadTestRunClient: @@ -28,11 +28,11 @@ def create_run_client(self, endpoint) -> LoadTestRunClient: ) return self.run_client - + async def close_admin_client(self): await self.admin_credential.close() await self.admin_client.close() - + async def close_run_client(self): await self.run_credential.close() await self.run_client.close()